Diffstat (limited to 'arch')
-rw-r--r--  arch/powerpc/include/asm/asm-compat.h |  2
-rw-r--r--  arch/powerpc/include/asm/ppc_asm.h    |  4
-rw-r--r--  arch/powerpc/lib/copyuser_64.S        |  6
-rw-r--r--  arch/powerpc/lib/mem_64.S             |  6
-rw-r--r--  arch/powerpc/lib/memcpy_64.S          |  6
5 files changed, 12 insertions, 12 deletions
diff --git a/arch/powerpc/include/asm/asm-compat.h b/arch/powerpc/include/asm/asm-compat.h
index 5d7fbe1950f9..6e82f5f9a6fd 100644
--- a/arch/powerpc/include/asm/asm-compat.h
+++ b/arch/powerpc/include/asm/asm-compat.h
@@ -29,7 +29,7 @@
 #define PPC_LLARX(t, a, b, eh)	PPC_LDARX(t, a, b, eh)
 #define PPC_STLCX	stringify_in_c(stdcx.)
 #define PPC_CNTLZL	stringify_in_c(cntlzd)
-#define PPC_MTOCRF(FXM, RS)	MTOCRF((FXM), (RS))
+#define PPC_MTOCRF(FXM, RS)	MTOCRF((FXM), RS)
 #define PPC_LR_STKOFF	16
 #define PPC_MIN_STKFRM	112
 #else /* 32-bit */
diff --git a/arch/powerpc/include/asm/ppc_asm.h b/arch/powerpc/include/asm/ppc_asm.h
index d4c589b4a2b8..dbc768358ac1 100644
--- a/arch/powerpc/include/asm/ppc_asm.h
+++ b/arch/powerpc/include/asm/ppc_asm.h
@@ -384,9 +384,9 @@ END_FTR_SECTION_IFCLR(CPU_FTR_601)
 #ifdef CONFIG_PPC64
 #define MTOCRF(FXM, RS)			\
 	BEGIN_FTR_SECTION_NESTED(848);	\
-	mtcrf	(FXM), (RS);		\
+	mtcrf	(FXM), RS;		\
 	FTR_SECTION_ELSE_NESTED(848);	\
-	mtocrf	(FXM), (RS);		\
+	mtocrf	(FXM), RS;		\
 	ALT_FTR_SECTION_END_NESTED_IFCLR(CPU_FTR_NOEXECUTE, 848)
 #endif
 
diff --git a/arch/powerpc/lib/copyuser_64.S b/arch/powerpc/lib/copyuser_64.S
index f47d05a51eb8..d73a59014900 100644
--- a/arch/powerpc/lib/copyuser_64.S
+++ b/arch/powerpc/lib/copyuser_64.S
@@ -30,7 +30,7 @@ _GLOBAL(__copy_tofrom_user_base)
 	dcbt	0,r4
 	beq	.Lcopy_page_4K
 	andi.	r6,r6,7
-	PPC_MTOCRF(0x01,R5)
+	PPC_MTOCRF(0x01,r5)
 	blt	cr1,.Lshort_copy
 /* Below we want to nop out the bne if we're on a CPU that has the
  * CPU_FTR_UNALIGNED_LD_STD bit set and the CPU_FTR_CP_USE_DCBTZ bit
@@ -186,7 +186,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 	blr
 
 .Ldst_unaligned:
-	PPC_MTOCRF(0x01,R6)		/* put #bytes to 8B bdry into cr7 */
+	PPC_MTOCRF(0x01,r6)		/* put #bytes to 8B bdry into cr7 */
 	subf	r5,r6,r5
 	li	r7,0
 	cmpldi	cr1,r5,16
@@ -201,7 +201,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 2:	bf	cr7*4+1,3f
 37:	lwzx	r0,r7,r4
 83:	stwx	r0,r7,r3
-3:	PPC_MTOCRF(0x01,R5)
+3:	PPC_MTOCRF(0x01,r5)
 	add	r4,r6,r4
 	add	r3,r6,r3
 	b	.Ldst_aligned
diff --git a/arch/powerpc/lib/mem_64.S b/arch/powerpc/lib/mem_64.S
index 886bfc780681..f4fcb0bc6563 100644
--- a/arch/powerpc/lib/mem_64.S
+++ b/arch/powerpc/lib/mem_64.S
@@ -19,7 +19,7 @@ _GLOBAL(memset)
 	rlwimi	r4,r4,16,0,15
 	cmplw	cr1,r5,r0		/* do we get that far? */
 	rldimi	r4,r4,32,0
-	PPC_MTOCRF(1,R0)
+	PPC_MTOCRF(1,r0)
 	mr	r6,r3
 	blt	cr1,8f
 	beq+	3f			/* if already 8-byte aligned */
@@ -49,7 +49,7 @@ _GLOBAL(memset)
 	bdnz	4b
 5:	srwi.	r0,r5,3
 	clrlwi	r5,r5,29
-	PPC_MTOCRF(1,R0)
+	PPC_MTOCRF(1,r0)
 	beq	8f
 	bf	29,6f
 	std	r4,0(r6)
@@ -65,7 +65,7 @@ _GLOBAL(memset)
 	std	r4,0(r6)
 	addi	r6,r6,8
8:	cmpwi	r5,0
-	PPC_MTOCRF(1,R5)
+	PPC_MTOCRF(1,r5)
 	beqlr+
 	bf	29,9f
 	stw	r4,0(r6)
diff --git a/arch/powerpc/lib/memcpy_64.S b/arch/powerpc/lib/memcpy_64.S
index 0a87b37e16fe..d2bbbc8d7dc0 100644
--- a/arch/powerpc/lib/memcpy_64.S
+++ b/arch/powerpc/lib/memcpy_64.S
@@ -16,7 +16,7 @@ BEGIN_FTR_SECTION
 FTR_SECTION_ELSE
 	b	memcpy_power7
 ALT_FTR_SECTION_END_IFCLR(CPU_FTR_VMX_COPY)
-	PPC_MTOCRF(0x01,R5)
+	PPC_MTOCRF(0x01,r5)
 	cmpldi	cr1,r5,16
 	neg	r6,r3		# LS 3 bits = # bytes to 8-byte dest bdry
 	andi.	r6,r6,7
@@ -158,7 +158,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 	blr
 
 .Ldst_unaligned:
-	PPC_MTOCRF(0x01,R6)	# put #bytes to 8B bdry into cr7
+	PPC_MTOCRF(0x01,r6)	# put #bytes to 8B bdry into cr7
 	subf	r5,r6,r5
 	li	r7,0
 	cmpldi	cr1,r5,16
@@ -173,7 +173,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 2:	bf	cr7*4+1,3f
 	lwzx	r0,r7,r4
 	stwx	r0,r7,r3
-3:	PPC_MTOCRF(0x01,R5)
+3:	PPC_MTOCRF(0x01,r5)
 	add	r4,r6,r4
 	add	r3,r6,r3
 	b	.Ldst_aligned