path: root/arch/powerpc/lib/copyuser_power7.S
author    Ulrich Weigand    2014-02-14 19:21:03 +0100
committer Anton Blanchard   2014-04-23 10:05:24 +1000
commit    752a6422fec3c0f5f9d4ac43d92f5dd13e22fde4 (patch)
tree      6ef91c1ad3c067345ce45bb6d7730ab9f38c9241 /arch/powerpc/lib/copyuser_power7.S
parent    b37c10d128a2fa3256d4e67c184177270eac4b86 (diff)
powerpc: Fix unsafe accesses to parameter area in ELFv2
Some of the assembler files in lib/ make use of the fact that in the ELFv1 ABI, the caller guarantees to provide stack space to save the parameter registers r3 ... r10. This guarantee is no longer present in ELFv2 for functions that have no variable argument list and no more than 8 arguments.

Change the affected routines to temporarily store registers in the red zone and/or the top of their own stack frame (in the space provided to save r31 .. r29, which is actually not used in these routines).

In opal_query_takeover, simply always allocate a stack frame; the routine is not performance critical.

Signed-off-by: Ulrich Weigand <ulrich.weigand@de.ibm.com>
Signed-off-by: Anton Blanchard <anton@samba.org>
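For context, the spill pattern this patch switches to can be sketched as below. This is illustrative only and not part of the commit; STACKFRAMESIZE, STK_PARAM and STK_REG are the kernel's existing ppc64 assembly macros, and R31 .. R29 name the otherwise-unused callee-save slots mentioned above.

	/* ELFv1-only pattern: spill into the caller's parameter save area.
	 * An ELFv2 caller need not provide this space, so it may be clobbered. */
	std	r3,STK_PARAM(R3)(r1)

	/* ELFv2-safe pattern used here: spill below the stack pointer, into the
	 * red-zone slots this routine's own frame would reserve for r31..r29
	 * (registers it never actually saves). */
	std	r3,-STACKFRAMESIZE+STK_REG(R31)(r1)

	/* ...and reload from the same slot before branching onward. */
	ld	r3,-STACKFRAMESIZE+STK_REG(R31)(r1)

Once the routine has actually allocated its frame with stdu r1,-STACKFRAMESIZE(r1) (as in the vmx path), those same slots sit at plain STK_REG(R31)(r1) offsets relative to the new stack pointer, which is why the third hunk below drops the STACKFRAMESIZE adjustment entirely.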
Diffstat (limited to 'arch/powerpc/lib/copyuser_power7.S')
-rw-r--r--   arch/powerpc/lib/copyuser_power7.S   24
1 file changed, 12 insertions, 12 deletions
diff --git a/arch/powerpc/lib/copyuser_power7.S b/arch/powerpc/lib/copyuser_power7.S
index db0fcbcc1d60..c46c876ac96a 100644
--- a/arch/powerpc/lib/copyuser_power7.S
+++ b/arch/powerpc/lib/copyuser_power7.S
@@ -85,9 +85,9 @@
.Lexit:
addi r1,r1,STACKFRAMESIZE
.Ldo_err1:
- ld r3,STK_PARAM(R3)(r1)
- ld r4,STK_PARAM(R4)(r1)
- ld r5,STK_PARAM(R5)(r1)
+ ld r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
+ ld r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
+ ld r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
b __copy_tofrom_user_base
@@ -96,18 +96,18 @@ _GLOBAL(__copy_tofrom_user_power7)
cmpldi r5,16
cmpldi cr1,r5,4096
- std r3,STK_PARAM(R3)(r1)
- std r4,STK_PARAM(R4)(r1)
- std r5,STK_PARAM(R5)(r1)
+ std r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
+ std r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
+ std r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
blt .Lshort_copy
bgt cr1,.Lvmx_copy
#else
cmpldi r5,16
- std r3,STK_PARAM(R3)(r1)
- std r4,STK_PARAM(R4)(r1)
- std r5,STK_PARAM(R5)(r1)
+ std r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
+ std r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
+ std r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
blt .Lshort_copy
#endif
@@ -298,9 +298,9 @@ err1; stb r0,0(r3)
bl enter_vmx_usercopy
cmpwi cr1,r3,0
ld r0,STACKFRAMESIZE+16(r1)
- ld r3,STACKFRAMESIZE+STK_PARAM(R3)(r1)
- ld r4,STACKFRAMESIZE+STK_PARAM(R4)(r1)
- ld r5,STACKFRAMESIZE+STK_PARAM(R5)(r1)
+ ld r3,STK_REG(R31)(r1)
+ ld r4,STK_REG(R30)(r1)
+ ld r5,STK_REG(R29)(r1)
mtlr r0
/*