summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorNicolas Pitre <nicolas.pitre@linaro.org>2013-03-12 13:00:42 +0100
committerSascha Hauer <s.hauer@pengutronix.de>2013-05-23 08:27:57 +0200
commit4586f9626b851a312eae3e452e2be5a5a2cf585d (patch)
treedfc7e15f51f90a396686cfe0e5dc754ff2a13f8c
parent2683e56174705d4c13bd773b3066eb308ae6e801 (diff)
downloadbarebox-4586f9626b851a312eae3e452e2be5a5a2cf585d.tar.gz
barebox-4586f9626b851a312eae3e452e2be5a5a2cf585d.tar.xz
ARM: fix the memset fix
From Kernel commit 418df63a ARM: 7670/1: fix the memset fix | Commit 455bd4c430b0 ("ARM: 7668/1: fix memset-related crashes caused by | recent GCC (4.7.2) optimizations") attempted to fix a compliance issue | with the memset return value. However the memset itself became broken | by that patch for misaligned pointers. | | This fixes the above by branching over the entry code from the | misaligned fixup code to avoid reloading the original pointer. | | Also, because the function entry alignment is wrong in the Thumb mode | compilation, that fixup code is moved to the end. | | While at it, the entry instructions are slightly reworked to help dual | issue pipelines. | | Signed-off-by: Nicolas Pitre <nico@linaro.org> | Tested-by: Alexander Holler <holler@ahsoftware.de> | Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk> Signed-off-by: Sascha Hauer <s.hauer@pengutronix.de>
-rw-r--r--  arch/arm/lib/memset.S  33
1 file changed, 13 insertions, 20 deletions
diff --git a/arch/arm/lib/memset.S b/arch/arm/lib/memset.S
index 5e35d7f5f6..c4d2672038 100644
--- a/arch/arm/lib/memset.S
+++ b/arch/arm/lib/memset.S
@@ -14,31 +14,15 @@
.text
.align 5
- .word 0
-
-1: subs r2, r2, #4 @ 1 do we have enough
- blt 5f @ 1 bytes to align with?
- cmp r3, #2 @ 1
- strltb r1, [ip], #1 @ 1
- strleb r1, [ip], #1 @ 1
- strb r1, [ip], #1 @ 1
- add r2, r2, r3 @ 1 (r2 = r2 - (4 - r3))
-/*
- * The pointer is now aligned and the length is adjusted. Try doing the
- * memset again.
- */
ENTRY(memset)
-/*
- * Preserve the contents of r0 for the return value.
- */
- mov ip, r0
- ands r3, ip, #3 @ 1 unaligned?
- bne 1b @ 1
+ ands r3, r0, #3 @ 1 unaligned?
+ mov ip, r0 @ preserve r0 as return value
+ bne 6f @ 1
/*
* we know that the pointer in ip is aligned to a word boundary.
*/
- orr r1, r1, r1, lsl #8
+1: orr r1, r1, r1, lsl #8
orr r1, r1, r1, lsl #16
mov r3, r1
cmp r2, #16
@@ -127,5 +111,14 @@ ENTRY(memset)
tst r2, #1
strneb r1, [ip], #1
mov pc, lr
+
+6: subs r2, r2, #4 @ 1 do we have enough
+ blt 5b @ 1 bytes to align with?
+ cmp r3, #2 @ 1
+ strltb r1, [ip], #1 @ 1
+ strleb r1, [ip], #1 @ 1
+ strb r1, [ip], #1 @ 1
+ add r2, r2, r3 @ 1 (r2 = r2 - (4 - r3))
+ b 1b
ENDPROC(memset)