powerpc: Fix unsafe accesses to parameter area in ELFv2
Author:     Ulrich Weigand <ulrich.weigand@de.ibm.com>
AuthorDate: Fri, 14 Feb 2014 18:21:03 +0000 (19:21 +0100)
Commit:     Anton Blanchard <anton@samba.org>
CommitDate: Wed, 23 Apr 2014 00:05:24 +0000 (10:05 +1000)
Some of the assembler files in lib/ make use of the fact that in the
ELFv1 ABI, the caller guarantees to provide stack space to save the
parameter registers r3 ... r10.  This guarantee is no longer present
in ELFv2 for functions that have no variable argument list and no
more than 8 arguments.
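
For illustration, the ELFv1-only idiom being removed looks like this (a
minimal sketch, not a literal excerpt from the patch; STK_PARAM is the
macro from arch/powerpc/include/asm/ppc_asm.h that maps a parameter
register to its save slot in the caller's frame):

	/* ELFv1 guarantees the caller reserved save slots for r3-r10
	 * in its frame, so a leaf routine may spill arguments there
	 * without allocating a frame of its own.  Under ELFv2 those
	 * slots need not exist, and these stores can corrupt the
	 * caller's stack. */
	std	r3,STK_PARAM(R3)(r1)	/* spill dest pointer */
	/* ... code that clobbers r3 ... */
	ld	r3,STK_PARAM(R3)(r1)	/* reload it as the return value */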

Change the affected routines to temporarily store registers in the
red zone and/or the top of their own stack frame (in the space
provided to save r31 .. r29, which is actually not used in these
routines).
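
With the definitions in arch/powerpc/include/asm/ppc_asm.h (STACKFRAMESIZE
is 256, and STK_REG(R31) is 248, dropping by 8 for each lower register),
-STACKFRAMESIZE+STK_REG(R31) resolves to -8(r1): the slot r31 would occupy
if the routine did allocate its own frame.  Both ABIs keep a red zone
below the stack pointer, so a leaf path can use these slots without moving
r1.  A minimal sketch of the pattern applied throughout the patch:

	/* Spill arguments into the r31/r30/r29 save slots of the frame
	 * this routine would create with "stdu r1,-STACKFRAMESIZE(r1)".
	 * The offsets work out to -8, -16 and -24 below r1, inside the
	 * red zone, so no frame actually needs to be allocated. */
	std	r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
	std	r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
	std	r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
	/* ... bulk of the routine, r3-r5 clobbered ... */
	ld	r3,-STACKFRAMESIZE+STK_REG(R31)(r1)	/* reload */

Once a frame really has been allocated with stdu, the same slots are
addressed as plain STK_REG(R31)(r1) and so on, which is why the vmx paths
below reload without the -STACKFRAMESIZE bias.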

In opal_query_takeover, simply always allocate a stack frame;
the routine is not performance critical.
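
There the frame is created up front, so the STK_PARAM slots land in
opal_query_takeover's own allocation rather than in a hoped-for save area
of the caller's frame.  The shape of the fix (a sketch; the real hunk is
below):

	stdu	r1,-STACKFRAMESIZE(r1)	/* own frame; STK_PARAM slots are ours */
	std	r3,STK_PARAM(R3)(r1)	/* safe under both ABIs */
	/* ... HVSC clobbers the argument registers ... */
	ld	r10,STK_PARAM(R3)(r1)	/* reload while the frame still exists */
	addi	r1,r1,STACKFRAMESIZE	/* pop only after the reloads */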

Signed-off-by: Ulrich Weigand <ulrich.weigand@de.ibm.com>
Signed-off-by: Anton Blanchard <anton@samba.org>
arch/powerpc/lib/copypage_power7.S
arch/powerpc/lib/copyuser_power7.S
arch/powerpc/lib/memcpy_64.S
arch/powerpc/lib/memcpy_power7.S
arch/powerpc/platforms/powernv/opal-takeover.S

diff --git a/arch/powerpc/lib/copypage_power7.S b/arch/powerpc/lib/copypage_power7.S
index affc6d308e13c4c856b3a81fe857fe660d16b51c..d7dafb3777acc6badda7b391ebc48580f37710a8 100644
--- a/arch/powerpc/lib/copypage_power7.S
+++ b/arch/powerpc/lib/copypage_power7.S
@@ -56,15 +56,15 @@ _GLOBAL(copypage_power7)
 
 #ifdef CONFIG_ALTIVEC
        mflr    r0
-       std     r3,STK_PARAM(R3)(r1)
-       std     r4,STK_PARAM(R4)(r1)
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
+       std     r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
        std     r0,16(r1)
        stdu    r1,-STACKFRAMESIZE(r1)
        bl      enter_vmx_copy
        cmpwi   r3,0
        ld      r0,STACKFRAMESIZE+16(r1)
-       ld      r3,STACKFRAMESIZE+STK_PARAM(R3)(r1)
-       ld      r4,STACKFRAMESIZE+STK_PARAM(R4)(r1)
+       ld      r3,STK_REG(R31)(r1)
+       ld      r4,STK_REG(R30)(r1)
        mtlr    r0
 
        li      r0,(PAGE_SIZE/128)
diff --git a/arch/powerpc/lib/copyuser_power7.S b/arch/powerpc/lib/copyuser_power7.S
index db0fcbcc1d60468f73f40af9a7c3911a98323f98..c46c876ac96af693445e91da8204ade16169ece6 100644
--- a/arch/powerpc/lib/copyuser_power7.S
+++ b/arch/powerpc/lib/copyuser_power7.S
@@ -85,9 +85,9 @@
 .Lexit:
        addi    r1,r1,STACKFRAMESIZE
 .Ldo_err1:
-       ld      r3,STK_PARAM(R3)(r1)
-       ld      r4,STK_PARAM(R4)(r1)
-       ld      r5,STK_PARAM(R5)(r1)
+       ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
+       ld      r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
+       ld      r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
        b       __copy_tofrom_user_base
 
 
@@ -96,18 +96,18 @@ _GLOBAL(__copy_tofrom_user_power7)
        cmpldi  r5,16
        cmpldi  cr1,r5,4096
 
-       std     r3,STK_PARAM(R3)(r1)
-       std     r4,STK_PARAM(R4)(r1)
-       std     r5,STK_PARAM(R5)(r1)
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
+       std     r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
+       std     r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
 
        blt     .Lshort_copy
        bgt     cr1,.Lvmx_copy
 #else
        cmpldi  r5,16
 
-       std     r3,STK_PARAM(R3)(r1)
-       std     r4,STK_PARAM(R4)(r1)
-       std     r5,STK_PARAM(R5)(r1)
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
+       std     r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
+       std     r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
 
        blt     .Lshort_copy
 #endif
@@ -298,9 +298,9 @@ err1;       stb     r0,0(r3)
        bl      enter_vmx_usercopy
        cmpwi   cr1,r3,0
        ld      r0,STACKFRAMESIZE+16(r1)
-       ld      r3,STACKFRAMESIZE+STK_PARAM(R3)(r1)
-       ld      r4,STACKFRAMESIZE+STK_PARAM(R4)(r1)
-       ld      r5,STACKFRAMESIZE+STK_PARAM(R5)(r1)
+       ld      r3,STK_REG(R31)(r1)
+       ld      r4,STK_REG(R30)(r1)
+       ld      r5,STK_REG(R29)(r1)
        mtlr    r0
 
        /*
diff --git a/arch/powerpc/lib/memcpy_64.S b/arch/powerpc/lib/memcpy_64.S
index 01da956a52fbec8618b83a79b71077f903182c30..9d3960c16fdea739ca996f6c0c6ac15ecc9f9426 100644
--- a/arch/powerpc/lib/memcpy_64.S
+++ b/arch/powerpc/lib/memcpy_64.S
@@ -12,7 +12,7 @@
        .align  7
 _GLOBAL(memcpy)
 BEGIN_FTR_SECTION
-       std     r3,STK_PARAM(R3)(r1)    /* save destination pointer for return value */
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)     /* save destination pointer for return value */
 FTR_SECTION_ELSE
 #ifndef SELFTEST
        b       memcpy_power7
@@ -73,7 +73,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 2:     bf      cr7*4+3,3f
        lbz     r9,8(r4)
        stb     r9,0(r3)
-3:     ld      r3,STK_PARAM(R3)(r1)    /* return dest pointer */
+3:     ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)     /* return dest pointer */
        blr
 
 .Lsrc_unaligned:
@@ -156,7 +156,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 2:     bf      cr7*4+3,3f
        rotldi  r9,r9,8
        stb     r9,0(r3)
-3:     ld      r3,STK_PARAM(R3)(r1)    /* return dest pointer */
+3:     ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)     /* return dest pointer */
        blr
 
 .Ldst_unaligned:
@@ -201,5 +201,5 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 3:     bf      cr7*4+3,4f
        lbz     r0,0(r4)
        stb     r0,0(r3)
-4:     ld      r3,STK_PARAM(R3)(r1)    /* return dest pointer */
+4:     ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)     /* return dest pointer */
        blr
diff --git a/arch/powerpc/lib/memcpy_power7.S b/arch/powerpc/lib/memcpy_power7.S
index 87d8eeccd4b7c9365e41b31f26e0a3e0bc9a2ed4..2ff5c142f87ba061257f1f00fcc948bc4cb04780 100644
--- a/arch/powerpc/lib/memcpy_power7.S
+++ b/arch/powerpc/lib/memcpy_power7.S
@@ -33,14 +33,14 @@ _GLOBAL(memcpy_power7)
        cmpldi  r5,16
        cmpldi  cr1,r5,4096
 
-       std     r3,STK_PARAM(R1)(r1)
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
 
        blt     .Lshort_copy
        bgt     cr1,.Lvmx_copy
 #else
        cmpldi  r5,16
 
-       std     r3,STK_PARAM(R1)(r1)
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
 
        blt     .Lshort_copy
 #endif
@@ -216,7 +216,7 @@ _GLOBAL(memcpy_power7)
        lbz     r0,0(r4)
        stb     r0,0(r3)
 
-15:    ld      r3,STK_PARAM(R3)(r1)
+15:    ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
        blr
 
 .Lunwind_stack_nonvmx_copy:
@@ -226,16 +226,16 @@ _GLOBAL(memcpy_power7)
 #ifdef CONFIG_ALTIVEC
 .Lvmx_copy:
        mflr    r0
-       std     r4,STK_PARAM(R4)(r1)
-       std     r5,STK_PARAM(R5)(r1)
+       std     r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
+       std     r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
        std     r0,16(r1)
        stdu    r1,-STACKFRAMESIZE(r1)
        bl      enter_vmx_copy
        cmpwi   cr1,r3,0
        ld      r0,STACKFRAMESIZE+16(r1)
-       ld      r3,STACKFRAMESIZE+STK_PARAM(R3)(r1)
-       ld      r4,STACKFRAMESIZE+STK_PARAM(R4)(r1)
-       ld      r5,STACKFRAMESIZE+STK_PARAM(R5)(r1)
+       ld      r3,STK_REG(R31)(r1)
+       ld      r4,STK_REG(R30)(r1)
+       ld      r5,STK_REG(R29)(r1)
        mtlr    r0
 
        /*
@@ -447,7 +447,7 @@ _GLOBAL(memcpy_power7)
        stb     r0,0(r3)
 
 15:    addi    r1,r1,STACKFRAMESIZE
-       ld      r3,STK_PARAM(R3)(r1)
+       ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
        b       exit_vmx_copy           /* tail call optimise */
 
 .Lvmx_unaligned_copy:
@@ -651,6 +651,6 @@ _GLOBAL(memcpy_power7)
        stb     r0,0(r3)
 
 15:    addi    r1,r1,STACKFRAMESIZE
-       ld      r3,STK_PARAM(R3)(r1)
+       ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
        b       exit_vmx_copy           /* tail call optimise */
 #endif /* CONFIG_ALTIVEC */
diff --git a/arch/powerpc/platforms/powernv/opal-takeover.S b/arch/powerpc/platforms/powernv/opal-takeover.S
index 3cd262897c272c5ee62f577bf4c9cef052f5a8c0..11a3169ee583635607a41406a7a571872cee97d9 100644
--- a/arch/powerpc/platforms/powernv/opal-takeover.S
+++ b/arch/powerpc/platforms/powernv/opal-takeover.S
 _GLOBAL(opal_query_takeover)
        mfcr    r0
        stw     r0,8(r1)
+       stdu    r1,-STACKFRAMESIZE(r1)
        std     r3,STK_PARAM(R3)(r1)
        std     r4,STK_PARAM(R4)(r1)
        li      r3,H_HAL_TAKEOVER
        li      r4,H_HAL_TAKEOVER_QUERY_MAGIC
        HVSC
        ld      r10,STK_PARAM(R3)(r1)
        std     r4,0(r10)
        ld      r10,STK_PARAM(R4)(r1)
        std     r5,0(r10)
+       addi    r1,r1,STACKFRAMESIZE
        lwz     r0,8(r1)
        mtcrf   0xff,r0
        blr