path: root/arch/arm/cpu/cache.S
author    Sascha Hauer <s.hauer@pengutronix.de>   2009-02-20 17:44:46 +0100
committer Sascha Hauer <s.hauer@pengutronix.de>   2009-08-19 10:51:30 +0200
commit    e2c8e8a1809aa14b60af88901ed3df85348e2987 (patch)
tree      2f16dc4043d0b3b8220370346662266a96f9e2aa /arch/arm/cpu/cache.S
parent    49ff3691b435c04d938ae6b75eaf18dac93817cc (diff)
Add MMU support
Signed-off-by: Sascha Hauer <s.hauer@pengutronix.de>
Diffstat (limited to 'arch/arm/cpu/cache.S')
-rw-r--r--  arch/arm/cpu/cache.S | 70
1 file changed, 70 insertions, 0 deletions
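
The new cache.S provides the ARMv4/v5 data cache maintenance primitives that become
necessary once the MMU and D-cache are enabled: dma_inv_range(), dma_clean_range() and
dma_flush_range(). As a rough sketch of the C-visible interface (the header location and
exact argument types are assumptions; only the start/end virtual-address semantics are
taken from the comments in the file below):

    /* assumed prototypes; the exact barebox header and types are a guess */
    void dma_inv_range(unsigned long start, unsigned long end);   /* discard lines; clean partial first/last line */
    void dma_clean_range(unsigned long start, unsigned long end); /* write dirty lines back to memory */
    void dma_flush_range(unsigned long start, unsigned long end); /* clean and invalidate */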
diff --git a/arch/arm/cpu/cache.S b/arch/arm/cpu/cache.S
new file mode 100644
index 0000000000..f91500aec1
--- /dev/null
+++ b/arch/arm/cpu/cache.S
@@ -0,0 +1,70 @@
+#ifndef ENTRY
+#define ENTRY(name) \
+ .globl name; \
+ name:
+#endif
+
+#define CACHE_DLINESIZE 32
+
+/*
+ * dma_inv_range(start, end)
+ *
+ * Invalidate (discard) the specified virtual address range.
+ * May not write back any entries. If 'start' or 'end'
+ * are not cache line aligned, those lines must be written
+ * back.
+ *
+ * - start - virtual start address
+ * - end - virtual end address
+ *
+ * (same as v4wb)
+ */
+ENTRY(dma_inv_range)
+ tst r0, #CACHE_DLINESIZE - 1 @ is 'start' cache line aligned?
+ bic r0, r0, #CACHE_DLINESIZE - 1 @ round 'start' down to a line boundary
+ mcrne p15, 0, r0, c7, c10, 1 @ clean D entry (write back partial first line)
+ tst r1, #CACHE_DLINESIZE - 1 @ is 'end' cache line aligned?
+ mcrne p15, 0, r1, c7, c10, 1 @ clean D entry (write back partial last line)
+1: mcr p15, 0, r0, c7, c6, 1 @ invalidate D entry
+ add r0, r0, #CACHE_DLINESIZE
+ cmp r0, r1
+ blo 1b
+ mcr p15, 0, r0, c7, c10, 4 @ drain WB
+ mov pc, lr
+
+/*
+ * dma_clean_range(start, end)
+ *
+ * Clean the specified virtual address range.
+ *
+ * - start - virtual start address
+ * - end - virtual end address
+ *
+ * (same as v4wb)
+ */
+ENTRY(dma_clean_range)
+ bic r0, r0, #CACHE_DLINESIZE - 1
+1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
+ add r0, r0, #CACHE_DLINESIZE
+ cmp r0, r1
+ blo 1b
+ mcr p15, 0, r0, c7, c10, 4 @ drain WB
+ mov pc, lr
+
+/*
+ * dma_flush_range(start, end)
+ *
+ * Clean and invalidate the specified virtual address range.
+ *
+ * - start - virtual start address
+ * - end - virtual end address
+ */
+ENTRY(dma_flush_range)
+ bic r0, r0, #CACHE_DLINESIZE - 1
+1: mcr p15, 0, r0, c7, c14, 1 @ clean+invalidate D entry
+ add r0, r0, #CACHE_DLINESIZE
+ cmp r0, r1
+ blo 1b
+ mcr p15, 0, r0, c7, c10, 4 @ drain WB
+ mov pc, lr
+
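
How these routines are typically used from C (a minimal sketch, not part of the patch:
the header name, argument types and the example driver functions are assumptions; only
dma_inv_range/dma_clean_range/dma_flush_range and their start/end semantics come from the
file added above):

    #include <asm/cache.h>                   /* assumed location of the prototypes */

    /* hypothetical helper: hand a buffer to a device for DMA out of memory */
    static void dma_to_device(void *buf, unsigned long len)
    {
            unsigned long start = (unsigned long)buf;

            /* write dirty lines back so the device reads current data */
            dma_clean_range(start, start + len);
            /* ... start the DMA transfer here ... */
    }

    /* hypothetical helper: pick up a buffer the device filled via DMA */
    static void dma_from_device(void *buf, unsigned long len)
    {
            unsigned long start = (unsigned long)buf;

            /* discard stale lines so the CPU re-reads what the device wrote */
            dma_inv_range(start, start + len);
            /* ... the CPU may now read buf ... */
    }

The clean-before-invalidate handling of partially covered lines in dma_inv_range matters
here: if the buffer start or end is not cache line aligned, unrelated data sharing those
lines is written back rather than silently discarded.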