/* arch/arm/include/asm/mmu.h */

#ifndef __ASM_MMU_H
#define __ASM_MMU_H

#include <common.h>
#include <errno.h>
#include <malloc.h>
#include <xfuncs.h>

#include <asm/pgtable.h>

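/*
 * Default first-level section descriptor flags: a read/write section
 * mapping, either uncached or additionally marked write-back cached.
 */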
#define PMD_SECT_DEF_UNCACHED (PMD_SECT_AP_WRITE | PMD_SECT_AP_READ | PMD_TYPE_SECT)
#define PMD_SECT_DEF_CACHED (PMD_SECT_WB | PMD_SECT_DEF_UNCACHED)

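/*
 * May be passed as the dma_handle argument by callers that do not
 * (yet) make use of the returned bus address.
 */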
#define DMA_ADDRESS_BROKEN	NULL

struct arm_memory;

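/*
 * In this configuration mmu_enable(), arm_create_section() and
 * setup_dma_coherent() are no-ops; only mmu_disable() has an
 * out-of-line implementation, presumably so the caches can still be
 * shut down before control is handed over.
 */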
static inline void mmu_enable(void)
{
}

void mmu_disable(void);

static inline void arm_create_section(unsigned long virt, unsigned long phys, int size_m,
		unsigned int flags)
{
}

static inline void setup_dma_coherent(unsigned long offset)
{
}

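/*
 * The self-referential define signals to common code that an
 * architecture-specific dma_alloc() exists. Aligning both the start
 * address and the size to 64 bytes keeps the buffer from sharing a
 * cache line with unrelated data.
 */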
#define dma_alloc dma_alloc
static inline void *dma_alloc(size_t size)
{
	return xmemalign(64, ALIGN(size, 64));
}

#ifdef CONFIG_MMU
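/*
 * MMU enabled: these are implemented out of line by the architecture's
 * MMU support code.
 */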
void *dma_alloc_coherent(size_t size, dma_addr_t *dma_handle);
void dma_free_coherent(void *mem, dma_addr_t dma_handle, size_t size);
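/*
 * Minimal usage sketch (struct mydesc and NUM_DESC are hypothetical):
 *
 *	dma_addr_t handle;
 *	struct mydesc *ring;
 *
 *	ring = dma_alloc_coherent(NUM_DESC * sizeof(*ring), &handle);
 *	if (!ring)
 *		return -ENOMEM;
 *
 *	(program "handle" into the device; the CPU accesses "ring"
 *	directly, with no dma_*_range() maintenance required on
 *	coherent memory)
 *
 *	dma_free_coherent(ring, handle, NUM_DESC * sizeof(*ring));
 */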

void dma_clean_range(unsigned long, unsigned long);
void dma_flush_range(unsigned long, unsigned long);
void dma_inv_range(unsigned long, unsigned long);
unsigned long virt_to_phys(volatile void *virt);
void *phys_to_virt(unsigned long phys);
void remap_range(void *_start, size_t size, uint32_t flags);
void *map_io_sections(unsigned long physaddr, void *start, size_t size);
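/* Second-level (PTE) flag sets for cached and uncached mappings. */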
uint32_t mmu_get_pte_cached_flags(void);
uint32_t mmu_get_pte_uncached_flags(void);

#else
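/*
 * MMU disabled: addresses are identity mapped and, since the data
 * cache cannot be enabled without the MMU on these cores, the cache
 * maintenance helpers can safely be empty.
 */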
static inline void *dma_alloc_coherent(size_t size, dma_addr_t *dma_handle)
{
	void *ret = xmemalign(4096, size);
	if (dma_handle)
		*dma_handle = (dma_addr_t)ret;

	return ret;
}

static inline void dma_free_coherent(void *mem, dma_addr_t dma_handle,
				     size_t size)
{
	free(mem);
}

static inline void *phys_to_virt(unsigned long phys)
{
	return (void *)phys;
}

static inline unsigned long virt_to_phys(volatile void *mem)
{
	return (unsigned long)mem;
}

static inline void dma_clean_range(unsigned long s, unsigned long e)
{
}

static inline void dma_flush_range(unsigned long s, unsigned long e)
{
}

static inline void dma_inv_range(unsigned long s, unsigned long e)
{
}

static inline void remap_range(void *_start, size_t size, uint32_t flags)
{
}

static inline void *map_io_sections(unsigned long phys, void *start, size_t size)
{
	return (void *)phys;
}

static inline uint32_t mmu_get_pte_cached_flags(void)
{
	return 0;
}

static inline uint32_t mmu_get_pte_uncached_flags(void)
{
	return 0;
}

#endif

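/*
 * Initialize an ARM L2C-210/220/310 (L2X0) outer cache controller;
 * compiles away when no such controller is configured.
 */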
#ifdef CONFIG_CACHE_L2X0
void __init l2x0_init(void __iomem *base, __u32 aux_val, __u32 aux_mask);
#else
static inline void __init l2x0_init(void __iomem *base, __u32 aux_val, __u32 aux_mask)
{
}
#endif

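/*
 * Maintenance hooks for an outer (L2) cache, operating on physical
 * address ranges; filled in by the outer cache driver, e.g.
 * l2x0_init().
 */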
struct outer_cache_fns {
	void (*inv_range)(unsigned long, unsigned long);
	void (*clean_range)(unsigned long, unsigned long);
	void (*flush_range)(unsigned long, unsigned long);
	void (*disable)(void);
};

extern struct outer_cache_fns outer_cache;

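/*
 * Low-level L1 cache maintenance primitives underlying the
 * dma_*_range() helpers above.
 */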
void __dma_clean_range(unsigned long, unsigned long);
void __dma_flush_range(unsigned long, unsigned long);
void __dma_inv_range(unsigned long, unsigned long);

#endif /* __ASM_MMU_H */