powerpc: Create mtmsr_isync()

mtmsr_isync() will do an mtmsr followed by an isync on older
processors. On newer processors we avoid the isync via a feature fixup.
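
The reg.h side of the change is not shown in this excerpt; a minimal
sketch of what the helper could look like, assuming the existing
__MTMSR macro (mtmsrd on 64-bit, mtmsr on 32-bit) and the
ASM_FTR_IFCLR()/CPU_FTR_ARCH_206 feature-fixup machinery (details here
are an assumption, not part of this diff):

    /*
     * Sketch only: write the MSR, then isync. On CPUs that have
     * CPU_FTR_ARCH_206 set, the feature-fixup pass patches the
     * isync into a nop at boot, avoiding the extra synchronisation.
     */
    static inline void mtmsr_isync(unsigned long val)
    {
    	asm volatile(__MTMSR " %0; " ASM_FTR_IFCLR("isync", "nop", %1) : :
    			"r" (val), "i" (CPU_FTR_ARCH_206) : "memory");
    }

With a helper like this, enable_kernel_fp() and friends below can set
the relevant MSR bit directly instead of going through the
giveup_*(NULL) paths.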

Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
diff --git a/arch/powerpc/kernel/process.c b/arch/powerpc/kernel/process.c
index ef64219..5bf8ec2 100644
--- a/arch/powerpc/kernel/process.c
+++ b/arch/powerpc/kernel/process.c
@@ -130,7 +130,10 @@
 		check_if_tm_restore_required(current);
 		giveup_fpu(current);
 	} else {
-		giveup_fpu(NULL);	/* just enables FP for kernel */
+		u64 oldmsr = mfmsr();
+
+		if (!(oldmsr & MSR_FP))
+			mtmsr_isync(oldmsr | MSR_FP);
 	}
 }
 EXPORT_SYMBOL(enable_kernel_fp);
@@ -144,7 +147,10 @@
 		check_if_tm_restore_required(current);
 		giveup_altivec(current);
 	} else {
-		giveup_altivec_notask();
+		u64 oldmsr = mfmsr();
+
+		if (!(oldmsr & MSR_VEC))
+			mtmsr_isync(oldmsr | MSR_VEC);
 	}
 }
 EXPORT_SYMBOL(enable_kernel_altivec);
@@ -173,10 +179,14 @@
 {
 	WARN_ON(preemptible());
 
-	if (current->thread.regs && (current->thread.regs->msr & MSR_VSX))
+	if (current->thread.regs && (current->thread.regs->msr & MSR_VSX)) {
 		giveup_vsx(current);
-	else
-		giveup_vsx(NULL);	/* just enable vsx for kernel - force */
+	} else {
+		u64 oldmsr = mfmsr();
+
+		if (!(oldmsr & MSR_VSX))
+			mtmsr_isync(oldmsr | MSR_VSX);
+	}
 }
 EXPORT_SYMBOL(enable_kernel_vsx);
 
@@ -209,10 +219,14 @@
 {
 	WARN_ON(preemptible());
 
-	if (current->thread.regs && (current->thread.regs->msr & MSR_SPE))
+	if (current->thread.regs && (current->thread.regs->msr & MSR_SPE)) {
 		giveup_spe(current);
-	else
-		giveup_spe(NULL);	/* just enable SPE for kernel - force */
+	} else {
+		u64 oldmsr = mfmsr();
+
+		if (!(oldmsr & MSR_SPE))
+			mtmsr_isync(oldmsr | MSR_SPE);
+	}
 }
 EXPORT_SYMBOL(enable_kernel_spe);