1 /*
2 * Copyright (c) 2006-2018, RT-Thread Development Team
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 *
6 * Change Logs:
7 * Date Author Notes
8 * 2015-04-15 ArdaFu Add code for IAR
9 */
10
11 #include "mmu.h"
12
13 /*----- Keil -----------------------------------------------------------------*/
14 #ifdef __CC_ARM
/* Program the translation table base register (TTBR, CP15 c2) with the
 * level-1 page table address 'i', after invalidating the TLB and setting
 * every domain to "client" mode so access is checked against the
 * permission bits in each page-table entry. */
void mmu_setttbase(rt_uint32_t i)
{
    register rt_uint32_t value;

    /* Invalidates all TLBs.Domain access is selected as
     * client by configuring domain access register,
     * in that case access controlled by permission value
     * set by page table entry
     */
    value = 0;
    /* invalidate entire unified TLB (CP15 c8, c7, 0) */
    __asm volatile{ mcr p15, 0, value, c8, c7, 0 }
    /* 0x55555555 = b01 ("client") for all 16 domains in the DACR (c3) */
    value = 0x55555555;
    __asm volatile { mcr p15, 0, value, c3, c0, 0 }
    /* load TTBR (c2) with the caller-supplied page-table base */
    __asm volatile { mcr p15, 0, i, c2, c0, 0 }
}
30
/* Write 'i' directly into the domain access control register (CP15 c3). */
void mmu_set_domain(rt_uint32_t i)
{
    __asm volatile { mcr p15, 0, i, c3, c0, 0 }
}
35
/* Enable the MMU: read-modify-write the CP15 control register (c1),
 * setting the M bit (bit 0). Page tables must already be installed
 * via mmu_setttbase(). */
void mmu_enable()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        orr value, value, #0x01    /* bit 0 = M (MMU enable) */
        mcr p15, 0, value, c1, c0, 0
    }
}
47
/* Disable the MMU by clearing the M bit (bit 0) of the CP15 control
 * register (c1). */
void mmu_disable()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        bic value, value, #0x01    /* clear bit 0 = M (MMU enable) */
        mcr p15, 0, value, c1, c0, 0
    }
}
59
/* Enable the instruction cache: set the I bit (bit 12) of the CP15
 * control register (c1). */
void mmu_enable_icache()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        orr value, value, #0x1000    /* bit 12 = I (I-cache enable) */
        mcr p15, 0, value, c1, c0, 0
    }
}
71
/* Enable the data cache: set the C bit (bit 2) of the CP15 control
 * register (c1). The MMU should be enabled for the D-cache to be
 * effective on this architecture. */
void mmu_enable_dcache()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        orr value, value, #0x04    /* bit 2 = C (D-cache enable) */
        mcr p15, 0, value, c1, c0, 0
    }
}
83
/* Disable the instruction cache: clear the I bit (bit 12) of the CP15
 * control register (c1). */
void mmu_disable_icache()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        bic value, value, #0x1000    /* clear bit 12 = I */
        mcr p15, 0, value, c1, c0, 0
    }
}
95
/* Disable the data cache: clear the C bit (bit 2) of the CP15 control
 * register (c1). Dirty lines are NOT cleaned here — callers that need
 * coherency must clean the D-cache first. */
void mmu_disable_dcache()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        bic value, value, #0x04    /* clear bit 2 = C */
        mcr p15, 0, value, c1, c0, 0
    }
}
107
/* Enable alignment-fault checking: set the A bit (bit 1) of the CP15
 * control register (c1). Unaligned accesses then raise a data abort. */
void mmu_enable_alignfault()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        orr value, value, #0x02    /* bit 1 = A (alignment check) */
        mcr p15, 0, value, c1, c0, 0
    }
}
119
/* Disable alignment-fault checking: clear the A bit (bit 1) of the
 * CP15 control register (c1). */
void mmu_disable_alignfault()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        bic value, value, #0x02    /* clear bit 1 = A */
        mcr p15, 0, value, c1, c0, 0
    }
}
131
/* Clean and invalidate one D-cache line selected by set/way index
 * (CP15 c7, c14, 2). The caller encodes the set and way in 'index'
 * per the core's cache geometry. */
void mmu_clean_invalidated_cache_index(int index)
{
    __asm volatile { mcr p15, 0, index, c7, c14, 2 }
}
136
/* Clean and invalidate (write back, then discard) every D-cache line
 * covering [buffer, buffer + size), line by line, using the
 * clean+invalidate-by-MVA operation (CP15 c7, c14, 1). */
void mmu_clean_invalidated_dcache(rt_uint32_t buffer, rt_uint32_t size)
{
    unsigned int ptr;

    ptr = buffer & ~(CACHE_LINE_SIZE - 1);    /* align down to line start */

    while(ptr < buffer + size)
    {
        __asm volatile { MCR p15, 0, ptr, c7, c14, 1 }
        ptr += CACHE_LINE_SIZE;
    }
}
149
/* Clean (write back dirty data, lines stay valid) every D-cache line
 * covering [buffer, buffer + size) using clean-by-MVA
 * (CP15 c7, c10, 1). */
void mmu_clean_dcache(rt_uint32_t buffer, rt_uint32_t size)
{
    unsigned int ptr;

    ptr = buffer & ~(CACHE_LINE_SIZE - 1);    /* align down to line start */

    while (ptr < buffer + size)
    {
        __asm volatile { MCR p15, 0, ptr, c7, c10, 1 }
        ptr += CACHE_LINE_SIZE;
    }
}
162
/* Invalidate (discard WITHOUT write-back) every D-cache line covering
 * [buffer, buffer + size) using invalidate-by-MVA (CP15 c7, c6, 1).
 * Any dirty data in the range is lost — intended for buffers about to
 * be overwritten, e.g. by DMA. */
void mmu_invalidate_dcache(rt_uint32_t buffer, rt_uint32_t size)
{
    unsigned int ptr;

    ptr = buffer & ~(CACHE_LINE_SIZE - 1);    /* align down to line start */

    while (ptr < buffer + size)
    {
        __asm volatile { MCR p15, 0, ptr, c7, c6, 1 }
        ptr += CACHE_LINE_SIZE;
    }
}
175
/* Invalidate the entire unified TLB (CP15 c8, c7, 0). */
void mmu_invalidate_tlb()
{
    register rt_uint32_t value;

    value = 0;    /* SBZ: written value is ignored by the operation */
    __asm volatile { mcr p15, 0, value, c8, c7, 0 }
}
183
/* Invalidate the entire instruction cache (CP15 c7, c5, 0). */
void mmu_invalidate_icache()
{
    register rt_uint32_t value;

    value = 0;    /* SBZ: written value is ignored by the operation */

    __asm volatile { mcr p15, 0, value, c7, c5, 0 }
}
192
193
/* Invalidate the entire data cache WITHOUT write-back (CP15 c7, c6, 0).
 * All dirty lines are lost; only safe when the D-cache holds no data
 * that must survive (e.g. during early init). */
void mmu_invalidate_dcache_all()
{
    register rt_uint32_t value;

    value = 0;    /* SBZ: written value is ignored by the operation */

    __asm volatile { mcr p15, 0, value, c7, c6, 0 }
}
202 /*----- GNU ------------------------------------------------------------------*/
203 #elif defined(__GNUC__) || defined(__ICCARM__)
/* GCC/IAR build: program the translation table base register (TTBR,
 * CP15 c2) with the level-1 page table address 'i', after invalidating
 * the TLB and setting all 16 domains to "client" mode so access is
 * checked against page-table permission bits. */
void mmu_setttbase(register rt_uint32_t i)
{
    register rt_uint32_t value;

    /* Invalidates all TLBs.Domain access is selected as
     * client by configuring domain access register,
     * in that case access controlled by permission value
     * set by page table entry
     */
    value = 0;
    /* invalidate entire unified TLB (CP15 c8, c7, 0) */
    asm volatile ("mcr p15, 0, %0, c8, c7, 0"::"r"(value));

    /* 0x55555555 = b01 ("client") for all 16 domains in the DACR (c3) */
    value = 0x55555555;
    asm volatile ("mcr p15, 0, %0, c3, c0, 0"::"r"(value));

    /* load TTBR (c2) with the caller-supplied page-table base */
    asm volatile ("mcr p15, 0, %0, c2, c0, 0"::"r"(i));

}
222
/* Write 'i' directly into the domain access control register (CP15 c3). */
void mmu_set_domain(register rt_uint32_t i)
{
    asm volatile ("mcr p15,0, %0, c3, c0, 0": :"r" (i));
}
227
/* Enable the MMU: set the M bit (bit 0) of the CP15 control register
 * (c1). Page tables must already be installed via mmu_setttbase(). */
void mmu_enable()
{
    asm volatile
    (
        "mrc p15, 0, r0, c1, c0, 0 \n"
        "orr r0, r0, #0x1 \n"          /* bit 0 = M (MMU enable) */
        "mcr p15, 0, r0, c1, c0, 0 \n"
        :::"r0"
    );
}
238
/* Disable the MMU by clearing the M bit (bit 0) of the CP15 control
 * register (c1). */
void mmu_disable()
{
    asm volatile
    (
        "mrc p15, 0, r0, c1, c0, 0 \n"
        "bic r0, r0, #0x1 \n"          /* clear bit 0 = M */
        "mcr p15, 0, r0, c1, c0, 0 \n"
        :::"r0"
    );

}
250
/* Enable the instruction cache: set the I bit (bit 12) of the CP15
 * control register (c1). */
void mmu_enable_icache()
{
    asm volatile
    (
        "mrc p15, 0, r0, c1, c0, 0 \n"
        "orr r0, r0, #(1<<12) \n"      /* bit 12 = I (I-cache enable) */
        "mcr p15, 0, r0, c1, c0, 0 \n"
        :::"r0"
    );
}
261
/* Enable the data cache: set the C bit (bit 2) of the CP15 control
 * register (c1). */
void mmu_enable_dcache()
{
    asm volatile
    (
        "mrc p15, 0, r0, c1, c0, 0 \n"
        "orr r0, r0, #(1<<2) \n"       /* bit 2 = C (D-cache enable) */
        "mcr p15, 0, r0, c1, c0, 0 \n"
        :::"r0"
    );

}
273
/* Disable the instruction cache: clear the I bit (bit 12) of the CP15
 * control register (c1). */
void mmu_disable_icache()
{
    asm volatile
    (
        "mrc p15, 0, r0, c1, c0, 0 \n"
        "bic r0, r0, #(1<<12) \n"      /* clear bit 12 = I */
        "mcr p15, 0, r0, c1, c0, 0 \n"
        :::"r0"
    );

}
285
/* Disable the data cache: clear the C bit (bit 2) of the CP15 control
 * register (c1). Dirty lines are NOT cleaned here — callers that need
 * coherency must clean the D-cache first. */
void mmu_disable_dcache()
{
    asm volatile
    (
        "mrc p15, 0, r0, c1, c0, 0 \n"
        "bic r0, r0, #(1<<2) \n"       /* clear bit 2 = C */
        "mcr p15, 0, r0, c1, c0, 0 \n"
        :::"r0"
    );

}
297
mmu_enable_alignfault()298 void mmu_enable_alignfault()
299 {
300 asm volatile
301 (
302 "mrc p15, 0, r0, c1, c0, 0 \n"
303 "orr r0, r0, #1 \n"
304 "mcr p15, 0, r0, c1, c0, 0 \n"
305 :::"r0"
306 );
307
308 }
309
mmu_disable_alignfault()310 void mmu_disable_alignfault()
311 {
312 asm volatile
313 (
314 "mrc p15, 0, r0, c1, c0, 0 \n"
315 "bic r0, r0, #1 \n"
316 "mcr p15, 0, r0, c1, c0, 0 \n"
317 :::"r0"
318 );
319
320 }
321
/* Clean and invalidate one D-cache line selected by set/way index
 * (CP15 c7, c14, 2). The caller encodes the set and way in 'index'
 * per the core's cache geometry. */
void mmu_clean_invalidated_cache_index(int index)
{
    asm volatile ("mcr p15, 0, %0, c7, c14, 2": :"r" (index));
}
326
/* Clean and invalidate (write back, then discard) every D-cache line
 * covering [buffer, buffer + size), using the clean+invalidate-by-MVA
 * operation (CP15 c7, c14, 1). The start address is first rounded down
 * to a cache-line boundary. */
void mmu_clean_invalidated_dcache(rt_uint32_t buffer, rt_uint32_t size)
{
    unsigned int end = buffer + size;
    unsigned int line;

    for (line = buffer & ~(CACHE_LINE_SIZE - 1);
         line < end;
         line += CACHE_LINE_SIZE)
    {
        asm volatile ("mcr p15, 0, %0, c7, c14, 1": :"r" (line));
    }
}
340
341
/* Clean (write back dirty data, lines stay valid) every D-cache line
 * covering [buffer, buffer + size) using clean-by-MVA
 * (CP15 c7, c10, 1). The start address is first rounded down to a
 * cache-line boundary. */
void mmu_clean_dcache(rt_uint32_t buffer, rt_uint32_t size)
{
    unsigned int end = buffer + size;
    unsigned int line;

    for (line = buffer & ~(CACHE_LINE_SIZE - 1);
         line < end;
         line += CACHE_LINE_SIZE)
    {
        asm volatile ("mcr p15, 0, %0, c7, c10, 1": :"r" (line));
    }
}
355
/* Invalidate (discard WITHOUT write-back) every D-cache line covering
 * [buffer, buffer + size) using invalidate-by-MVA (CP15 c7, c6, 1).
 * Any dirty data in the range is lost — intended for buffers about to
 * be overwritten, e.g. by DMA. */
void mmu_invalidate_dcache(rt_uint32_t buffer, rt_uint32_t size)
{
    unsigned int end = buffer + size;
    unsigned int line;

    for (line = buffer & ~(CACHE_LINE_SIZE - 1);
         line < end;
         line += CACHE_LINE_SIZE)
    {
        asm volatile ("mcr p15, 0, %0, c7, c6, 1": :"r" (line));
    }
}
369
/* Invalidate the entire unified TLB (CP15 c8, c7, 0). */
void mmu_invalidate_tlb()
{
    asm volatile ("mcr p15, 0, %0, c8, c7, 0": :"r" (0));

}
375
/* Invalidate the entire instruction cache (CP15 c7, c5, 0). */
void mmu_invalidate_icache()
{
    asm volatile ("mcr p15, 0, %0, c7, c5, 0": :"r" (0));

}
381
/* Invalidate the entire data cache WITHOUT write-back (CP15 c7, c6, 0).
 * All dirty lines are lost; only safe when the D-cache holds no data
 * that must survive (e.g. during early init). */
void mmu_invalidate_dcache_all()
{
    asm volatile ("mcr p15, 0, %0, c7, c6, 0": :"r" (0));

}
387 #endif
388
/* level1 page table: 4096 section descriptors (1 MiB each, covering the
 * full 4 GiB address space). The TTBR requires the table to be aligned
 * to 16 KiB; each toolchain expresses that alignment differently. */
#if defined(__ICCARM__)
#pragma data_alignment=(16*1024)
static volatile rt_uint32_t _page_table[4*1024];
#else
static volatile rt_uint32_t _page_table[4*1024] \
    __attribute__((aligned(16*1024)));
#endif
397
/* Fill level-1 section descriptors mapping the virtual range
 * [vaddrStart, vaddrEnd] (inclusive, in 1 MiB sections) onto physical
 * memory starting at paddrStart, with 'attr' providing the descriptor
 * attribute bits (AP, domain, C/B, section type). */
void mmu_setmtt(rt_uint32_t vaddrStart, rt_uint32_t vaddrEnd,
                rt_uint32_t paddrStart, rt_uint32_t attr)
{
    /* one descriptor per 1 MiB section; index = top 12 address bits */
    volatile rt_uint32_t *entry = (rt_uint32_t *)_page_table + (vaddrStart >> 20);
    int section_count = (int)((vaddrEnd >> 20) - (vaddrStart >> 20));
    int n;

    /* inclusive range: a zero-length span still maps one section */
    for (n = 0; n <= section_count; n++)
    {
        entry[n] = attr | (((paddrStart >> 20) + n) << 20);
    }
}
412
/* Initialize the MMU from a table of 'size' memory descriptors.
 *
 * mdesc : array of regions; each entry supplies the virtual range
 *         (vaddr_start/vaddr_end), its physical base (paddr_start) and
 *         the section attribute bits (attr).
 * size  : number of entries in mdesc.
 *
 * Sequence is order-critical: caches and MMU are turned off and the
 * TLB flushed before the page table is (re)written, then the MMU is
 * enabled before the caches, and finally stale cache contents are
 * invalidated. */
void rt_hw_mmu_init(struct mem_desc *mdesc, rt_uint32_t size)
{
    /* disable I/D cache */
    mmu_disable_dcache();
    mmu_disable_icache();
    mmu_disable();
    mmu_invalidate_tlb();

    /* set page table: one mmu_setmtt() call per descriptor */
    for (; size > 0; size--)
    {
        mmu_setmtt(mdesc->vaddr_start, mdesc->vaddr_end,
                   mdesc->paddr_start, mdesc->attr);
        mdesc++;
    }

    /* set MMU table address */
    mmu_setttbase((rt_uint32_t)_page_table);

    /* enables MMU */
    mmu_enable();

    /* enable Instruction Cache */
    mmu_enable_icache();

    /* enable Data Cache */
    mmu_enable_dcache();

    /* discard anything cached before/while the MMU came up */
    mmu_invalidate_icache();
    mmu_invalidate_dcache_all();
}
444