author    Sascha Hauer <s.hauer@pengutronix.de>  2012-07-09 09:05:37 +0200
committer Sascha Hauer <s.hauer@pengutronix.de>  2012-07-23 23:57:44 +0200
commit    7c3e50c83db91e2a8679c8e7fc3e77c091c20697 (patch)
tree      43f4ae5cd1e1b0a0e246405fadf47f9f276b9ba6 /arch/arm/cpu
parent    244198ea8bdf592799ebfd430fe9ab165284e480 (diff)
ARM: Separate assembler functions into their own section
To let the linker remove unused functions.

Signed-off-by: Sascha Hauer <s.hauer@pengutronix.de>
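The change is the assembler-level counterpart of compiling C with -ffunction-sections: once every routine sits in its own input section, linking with --gc-sections lets the linker discard the sections nothing references. A minimal sketch of the pattern, with illustrative routine names that are not part of this patch:

.section .text.used_helper, "ax"
.global used_helper
used_helper:
	mov	pc, lr		@ something references used_helper, so .text.used_helper is kept

.section .text.unused_helper, "ax"
.global unused_helper
unused_helper:
	mov	pc, lr		@ nothing references this symbol; ld --gc-sections drops .text.unused_helper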
Diffstat (limited to 'arch/arm/cpu')
-rw-r--r--  arch/arm/cpu/cache-armv4.S |  7
-rw-r--r--  arch/arm/cpu/cache-armv5.S |  7
-rw-r--r--  arch/arm/cpu/cache-armv6.S | 11
-rw-r--r--  arch/arm/cpu/cache-armv7.S |  8
4 files changed, 27 insertions(+), 6 deletions(-)
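For the garbage collection to take effect, the linker script still has to gather the per-function sections back into the output .text and the link must run with section GC enabled. Roughly, and only as an assumption about how the build ties this together (the actual barebox linker script is not part of this diff):

SECTIONS
{
	.text : {
		*(.text)
		*(.text.*)	/* collects .text.__mmu_cache_on, .text.__dma_inv_range, ... */
	}
}

/* link step invoked with section GC, e.g. ld --gc-sections
   (or -Wl,--gc-sections when going through the compiler driver) */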
diff --git a/arch/arm/cpu/cache-armv4.S b/arch/arm/cpu/cache-armv4.S
index 6d03565c58..2231eee06b 100644
--- a/arch/arm/cpu/cache-armv4.S
+++ b/arch/arm/cpu/cache-armv4.S
@@ -3,6 +3,7 @@
#define CACHE_DLINESIZE 32
+.section .text.__mmu_cache_on
ENTRY(__mmu_cache_on)
mov r12, lr
#ifdef CONFIG_MMU
@@ -30,6 +31,7 @@ __common_mmu_cache_on:
mrc p15, 0, r0, c1, c0, 0 @ and read it back to
sub pc, lr, r0, lsr #32 @ properly flush pipeline
+.section .text.__mmu_cache_off
ENTRY(__mmu_cache_off)
#ifdef CONFIG_MMU
mrc p15, 0, r0, c1, c0
@@ -42,6 +44,7 @@ ENTRY(__mmu_cache_off)
mov pc, lr
ENDPROC(__mmu_cache_off)
+.section .text.__mmu_cache_flush
ENTRY(__mmu_cache_flush)
mrc p15, 0, r6, c0, c0 @ get processor ID
mov r2, #64*1024 @ default: 32K dcache size (*2)
@@ -74,7 +77,6 @@ no_cache_id:
mov pc, lr
ENDPROC(__mmu_cache_flush)
-.section ".text.text"
/*
* dma_inv_range(start, end)
*
@@ -88,6 +90,7 @@ ENDPROC(__mmu_cache_flush)
*
* (same as v4wb)
*/
+.section .text.__dma_inv_range
ENTRY(__dma_inv_range)
tst r0, #CACHE_DLINESIZE - 1
bic r0, r0, #CACHE_DLINESIZE - 1
@@ -111,6 +114,7 @@ ENTRY(__dma_inv_range)
*
* (same as v4wb)
*/
+.section .text.__dma_clean_range
ENTRY(__dma_clean_range)
bic r0, r0, #CACHE_DLINESIZE - 1
1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -128,6 +132,7 @@ ENTRY(__dma_clean_range)
* - start - virtual start address
* - end - virtual end address
*/
+.section .text.__dma_flush_range
ENTRY(__dma_flush_range)
bic r0, r0, #CACHE_DLINESIZE - 1
1: mcr p15, 0, r0, c7, c14, 1 @ clean+invalidate D entry
diff --git a/arch/arm/cpu/cache-armv5.S b/arch/arm/cpu/cache-armv5.S
index a1193a6a66..d6ffaf10e2 100644
--- a/arch/arm/cpu/cache-armv5.S
+++ b/arch/arm/cpu/cache-armv5.S
@@ -3,6 +3,7 @@
#define CACHE_DLINESIZE 32
+.section .text.__mmu_cache_on
ENTRY(__mmu_cache_on)
mov r12, lr
#ifdef CONFIG_MMU
@@ -30,6 +31,7 @@ __common_mmu_cache_on:
mrc p15, 0, r0, c1, c0, 0 @ and read it back to
sub pc, lr, r0, lsr #32 @ properly flush pipeline
+.section .text.__mmu_cache_off
ENTRY(__mmu_cache_off)
#ifdef CONFIG_MMU
mrc p15, 0, r0, c1, c0
@@ -42,6 +44,7 @@ ENTRY(__mmu_cache_off)
mov pc, lr
ENDPROC(__mmu_cache_off)
+.section .text.__mmu_cache_flush
ENTRY(__mmu_cache_flush)
1: mrc p15, 0, r15, c7, c14, 3 @ test,clean,invalidate D cache
bne 1b
@@ -49,7 +52,6 @@ ENTRY(__mmu_cache_flush)
mcr p15, 0, r0, c7, c10, 4 @ drain WB
mov pc, lr
ENDPROC(__mmu_cache_flush)
-.section ".text.text"
/*
* dma_inv_range(start, end)
@@ -64,6 +66,7 @@ ENDPROC(__mmu_cache_flush)
*
* (same as v4wb)
*/
+.section .text.__dma_inv_range
ENTRY(__dma_inv_range)
tst r0, #CACHE_DLINESIZE - 1
bic r0, r0, #CACHE_DLINESIZE - 1
@@ -87,6 +90,7 @@ ENTRY(__dma_inv_range)
*
* (same as v4wb)
*/
+.section .text.__dma_clean_range
ENTRY(__dma_clean_range)
bic r0, r0, #CACHE_DLINESIZE - 1
1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -104,6 +108,7 @@ ENTRY(__dma_clean_range)
* - start - virtual start address
* - end - virtual end address
*/
+.section .text.__dma_flush_range
ENTRY(__dma_flush_range)
bic r0, r0, #CACHE_DLINESIZE - 1
1: mcr p15, 0, r0, c7, c14, 1 @ clean+invalidate D entry
diff --git a/arch/arm/cpu/cache-armv6.S b/arch/arm/cpu/cache-armv6.S
index 335bac2a45..02b1d3e58a 100644
--- a/arch/arm/cpu/cache-armv6.S
+++ b/arch/arm/cpu/cache-armv6.S
@@ -5,6 +5,7 @@
#define CACHE_LINE_SIZE 32
#define D_CACHE_LINE_SIZE 32
+.section .text.__mmu_cache_on
ENTRY(__mmu_cache_on)
mov r12, lr
#ifdef CONFIG_MMU
@@ -33,6 +34,7 @@ __common_mmu_cache_on:
sub pc, lr, r0, lsr #32 @ properly flush pipeline
+.section .text.__mmu_cache_off
ENTRY(__mmu_cache_off)
#ifdef CONFIG_MMU
mrc p15, 0, r0, c1, c0
@@ -44,6 +46,7 @@ ENTRY(__mmu_cache_off)
#endif
mov pc, lr
+.section .text.__mmu_cache_flush
ENTRY(__mmu_cache_flush)
mov r1, #0
mcr p15, 0, r1, c7, c14, 0 @ clean+invalidate D
@@ -52,7 +55,6 @@ ENTRY(__mmu_cache_flush)
mcr p15, 0, r1, c7, c10, 4 @ drain WB
mov pc, lr
ENDPROC(__mmu_cache_flush)
-.section ".text.text"
/*
* v6_dma_inv_range(start,end)
@@ -64,6 +66,7 @@ ENDPROC(__mmu_cache_flush)
* - start - virtual start address of region
* - end - virtual end address of region
*/
+.section .text.__dma_inv_range
ENTRY(__dma_inv_range)
tst r0, #D_CACHE_LINE_SIZE - 1
bic r0, r0, #D_CACHE_LINE_SIZE - 1
@@ -91,12 +94,14 @@ ENTRY(__dma_inv_range)
mov r0, #0
mcr p15, 0, r0, c7, c10, 4 @ drain write buffer
mov pc, lr
+ENDPROC(__dma_inv_range)
/*
* v6_dma_clean_range(start,end)
* - start - virtual start address of region
* - end - virtual end address of region
*/
+.section .text.__dma_clean_range
ENTRY(__dma_clean_range)
bic r0, r0, #D_CACHE_LINE_SIZE - 1
1:
@@ -111,12 +116,14 @@ ENTRY(__dma_clean_range)
mov r0, #0
mcr p15, 0, r0, c7, c10, 4 @ drain write buffer
mov pc, lr
+ENDPROC(__dma_clean_range)
/*
* v6_dma_flush_range(start,end)
* - start - virtual start address of region
* - end - virtual end address of region
*/
+.section .text.__dma_flush_range
ENTRY(__dma_flush_range)
bic r0, r0, #D_CACHE_LINE_SIZE - 1
1:
@@ -131,4 +138,4 @@ ENTRY(__dma_flush_range)
mov r0, #0
mcr p15, 0, r0, c7, c10, 4 @ drain write buffer
mov pc, lr
-
+ENDPROC(__dma_flush_range)
diff --git a/arch/arm/cpu/cache-armv7.S b/arch/arm/cpu/cache-armv7.S
index 28a6315522..9bd74254f3 100644
--- a/arch/arm/cpu/cache-armv7.S
+++ b/arch/arm/cpu/cache-armv7.S
@@ -1,6 +1,7 @@
#include <linux/linkage.h>
#include <init.h>
+.section .text.__mmu_cache_on
ENTRY(__mmu_cache_on)
mov r12, lr
#ifdef CONFIG_MMU
@@ -30,6 +31,7 @@ ENTRY(__mmu_cache_on)
mov pc, r12
ENDPROC(__mmu_cache_on)
+.section .text.__mmu_cache_off
ENTRY(__mmu_cache_off)
mrc p15, 0, r0, c1, c0
#ifdef CONFIG_MMU
@@ -50,6 +52,7 @@ ENTRY(__mmu_cache_off)
mov pc, r12
ENDPROC(__mmu_cache_off)
+.section .text.__mmu_cache_flush
ENTRY(__mmu_cache_flush)
mrc p15, 0, r10, c0, c1, 5 @ read ID_MMFR1
tst r10, #0xf << 16 @ hierarchical cache (ARMv7)
@@ -110,7 +113,6 @@ iflush:
mcr p15, 0, r10, c7, c5, 4 @ ISB
mov pc, lr
ENDPROC(__mmu_cache_flush)
-.section ".text.text"
/*
* cache_line_size - get the cache line size from the CSIDR register
@@ -134,6 +136,7 @@ ENDPROC(__mmu_cache_flush)
* - start - virtual start address of region
* - end - virtual end address of region
*/
+.section .text.__dma_inv_range
ENTRY(__dma_inv_range)
dcache_line_size r2, r3
sub r3, r2, #1
@@ -158,6 +161,7 @@ ENDPROC(__dma_inv_range)
* - start - virtual start address of region
* - end - virtual end address of region
*/
+.section .text.__dma_clean_range
ENTRY(__dma_clean_range)
dcache_line_size r2, r3
sub r3, r2, #1
@@ -176,6 +180,7 @@ ENDPROC(__dma_clean_range)
* - start - virtual start address of region
* - end - virtual end address of region
*/
+.section .text.__dma_flush_range
ENTRY(__dma_flush_range)
dcache_line_size r2, r3
sub r3, r2, #1
@@ -188,4 +193,3 @@ ENTRY(__dma_flush_range)
dsb
mov pc, lr
ENDPROC(__dma_flush_range)
-