Diffstat (limited to 'arch')
 arch/arm/Kconfig        |  7 +++++++
 arch/arm/lib64/Makefile |  2 +-
 arch/arm/lib64/memcpy.S |  6 +++---
 arch/arm/lib64/memset.S |  4 ++--
 arch/arm/lib64/string.c | 22 ++++++++++++++++++++++
 5 files changed, 35 insertions(+), 6 deletions(-)
diff --git a/arch/arm/Kconfig b/arch/arm/Kconfig
index 37cde0c0c5..c6a4cadb32 100644
--- a/arch/arm/Kconfig
+++ b/arch/arm/Kconfig
@@ -353,6 +353,13 @@ menu "ARM specific settings"
config ARM_OPTIMZED_STRING_FUNCTIONS
bool "use assembler optimized string functions"
+	#
+	# memset() and memcpy() in arm/lib64/mem[set|cpy].S are
+	# written on the assumption that the MMU and cache are
+	# enabled. Depending on the inputs, they may fail with an
+	# alignment exception if used without the MMU.
+	#
+	depends on !CPU_V8 || MMU
help
Say yes here to use assembler optimized memcpy / memset functions.
These functions work much faster than the normal versions but
increase your binary size.
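
Background on the new dependency: with the MMU disabled, an ARMv8 CPU treats
all data accesses as Device memory accesses, and unaligned accesses to Device
memory trap with an alignment fault. The optimized routines rely on unaligned
ldp/stp pairs (and, for memset, on "dc zva"), so they are only safe once the
MMU is on. A minimal illustration of the failure mode; copy16_off is a
hypothetical example, not part of this patch:

	#include <stdint.h>
	#include <string.h>

	/*
	 * Hypothetical illustration: the optimized memcpy issues 16-byte
	 * ldp/stp accesses at arbitrary offsets. On Normal (MMU-mapped,
	 * cacheable) memory this is fine; with SCTLR.M clear every access
	 * goes to Device-nGnRnE memory, where such unaligned accesses
	 * raise an alignment fault -- hence "depends on !CPU_V8 || MMU".
	 */
	void copy16_off(void *dst, const void *src)
	{
		memcpy((uint8_t *)dst + 1, (const uint8_t *)src + 3, 16);
	}
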
diff --git a/arch/arm/lib64/Makefile b/arch/arm/lib64/Makefile
index 77647128a5..4c0019fabe 100644
--- a/arch/arm/lib64/Makefile
+++ b/arch/arm/lib64/Makefile
@@ -2,7 +2,7 @@ obj-y += stacktrace.o
obj-$(CONFIG_ARM_LINUX) += armlinux.o
obj-y += div0.o
obj-$(CONFIG_ARM_OPTIMZED_STRING_FUNCTIONS) += memcpy.o
-obj-$(CONFIG_ARM_OPTIMZED_STRING_FUNCTIONS) += memset.o
+obj-$(CONFIG_ARM_OPTIMZED_STRING_FUNCTIONS) += memset.o string.o
extra-y += barebox.lds
obj-pbl-y += runtime-offset.o
diff --git a/arch/arm/lib64/memcpy.S b/arch/arm/lib64/memcpy.S
index cfed3191c5..a70e96ca29 100644
--- a/arch/arm/lib64/memcpy.S
+++ b/arch/arm/lib64/memcpy.S
@@ -67,8 +67,8 @@
stp \ptr, \regB, [\regC], \val
.endm
- .weak memcpy
-ENTRY(memcpy)
+ .weak __arch_memcpy
+ENTRY(__arch_memcpy)
#include "copy_template.S"
ret
-ENDPROC(memcpy)
+ENDPROC(__arch_memcpy)
diff --git a/arch/arm/lib64/memset.S b/arch/arm/lib64/memset.S
index 380a54097e..d17bcc6125 100644
--- a/arch/arm/lib64/memset.S
+++ b/arch/arm/lib64/memset.S
@@ -54,7 +54,7 @@ tmp3w .req w9
tmp3 .req x9
.weak memset
-ENTRY(memset)
+ENTRY(__arch_memset)
mov dst, dstin /* Preserve return value. */
and A_lw, val, #255
orr A_lw, A_lw, A_lw, lsl #8
@@ -212,4 +212,4 @@ ENTRY(memset)
ands count, count, zva_bits_x
b.ne .Ltail_maybe_long
ret
-ENDPROC(memset)
+ENDPROC(__arch_memset)
diff --git a/arch/arm/lib64/string.c b/arch/arm/lib64/string.c
new file mode 100644
index 0000000000..cb26331527
--- /dev/null
+++ b/arch/arm/lib64/string.c
@@ -0,0 +1,22 @@
+#include <common.h>
+#include <asm/system.h>
+#include <string.h>
+
+void *__arch_memset(void *dst, int c, __kernel_size_t size);
+void *__arch_memcpy(void *dest, const void *src, size_t count);
+
+void *memset(void *dst, int c, __kernel_size_t size)
+{
+ if (likely(get_cr() & CR_M))
+ return __arch_memset(dst, c, size);
+
+ return __default_memset(dst, c, size);
+}
+
+void *memcpy(void *dest, const void *src, size_t count)
+{
+ if (likely(get_cr() & CR_M))
+ return __arch_memcpy(dest, src, count);
+
+ return __default_memcpy(dest, src, count);
+} \ No newline at end of file