/*
 * Copyright (c) 2006-2018, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2008-04-25     Yi.qiu       first version
 * 2009-12-18     Bernard      port to armcc
 */
11
12 #include <rtthread.h>
13 #include "s3c24x0.h"
14
/* Base address of the first-level MMU translation table (placed in SDRAM). */
#define _MMUTT_STARTADDRESS 0x33FF0000

/* First-level section descriptor field encodings (ARMv4 short descriptors). */
#define DESC_SEC (0x2|(1<<4))   /* section descriptor type; bit 4 must be set */
#define CB (3<<2)               /* cache on, write-back */
#define CNB (2<<2)              /* cache on, write-through */
#define NCB (1<<2)              /* cache off, write buffer on */
#define NCNB (0<<2)             /* cache off, write buffer off */
#define AP_RW (3<<10)           /* access permission: supervisor=RW, user=RW */
#define AP_RO (2<<10)           /* access permission: supervisor=RW, user=RO */

/* Domain access-control values (written to CP15 c3, 2 bits per domain). */
#define DOMAIN_FAULT (0x0)      /* all accesses fault */
#define DOMAIN_CHK (0x1)        /* client: AP bits are checked */
#define DOMAIN_NOTCHK (0x3)     /* manager: AP bits are ignored */
#define DOMAIN0 (0x0<<5)        /* domain field of a section descriptor */
#define DOMAIN1 (0x1<<5)

#define DOMAIN0_ATTR (DOMAIN_CHK<<0)
#define DOMAIN1_ATTR (DOMAIN_FAULT<<2)

/* Ready-made section attributes combining AP, domain, cache mode and type. */
#define RW_CB (AP_RW|DOMAIN0|CB|DESC_SEC)       /* read/write, cached write-back */
#define RW_CNB (AP_RW|DOMAIN0|CNB|DESC_SEC)     /* read/write, cached write-through */
#define RW_NCNB (AP_RW|DOMAIN0|NCNB|DESC_SEC)   /* read/write, uncached, unbuffered */
#define RW_FAULT (AP_RW|DOMAIN1|NCNB|DESC_SEC)  /* mapped into no-access domain 1 */
38
39 #ifdef __GNUC__
/* Load the CP15 translation table base register (TTBR, c2) with 'i'. */
void mmu_setttbase(register rt_uint32_t i)
{
    asm volatile ("mcr p15, 0, %0, c2, c0, 0": :"r" (i));
}
44
/* Write the CP15 domain access-control register (c3) with 'i'. */
void mmu_set_domain(register rt_uint32_t i)
{
    asm volatile ("mcr p15,0, %0, c3, c0, 0": :"r" (i));
}
49
mmu_enable()50 void mmu_enable()
51 {
52 register rt_uint32_t i;
53
54 /* read control register */
55 asm volatile ("mrc p15, 0, %0, c1, c0, 0":"=r" (i));
56
57 i |= 0x1;
58
59 /* write back to control register */
60 asm volatile ("mcr p15, 0, %0, c1, c0, 0": :"r" (i));
61 }
62
mmu_disable()63 void mmu_disable()
64 {
65 register rt_uint32_t i;
66
67 /* read control register */
68 asm volatile ("mrc p15, 0, %0, c1, c0, 0":"=r" (i));
69
70 i &= ~0x1;
71
72 /* write back to control register */
73 asm volatile ("mcr p15, 0, %0, c1, c0, 0": :"r" (i));
74 }
75
mmu_enable_icache()76 void mmu_enable_icache()
77 {
78 register rt_uint32_t i;
79
80 /* read control register */
81 asm volatile ("mrc p15, 0, %0, c1, c0, 0":"=r" (i));
82
83 i |= (1 << 12);
84
85 /* write back to control register */
86 asm volatile ("mcr p15, 0, %0, c1, c0, 0": :"r" (i));
87 }
88
mmu_enable_dcache()89 void mmu_enable_dcache()
90 {
91 register rt_uint32_t i;
92
93 /* read control register */
94 asm volatile ("mrc p15, 0, %0, c1, c0, 0":"=r" (i));
95
96 i |= (1 << 2);
97
98 /* write back to control register */
99 asm volatile ("mcr p15, 0, %0, c1, c0, 0": :"r" (i));
100 }
101
mmu_disable_icache()102 void mmu_disable_icache()
103 {
104 register rt_uint32_t i;
105
106 /* read control register */
107 asm volatile ("mrc p15, 0, %0, c1, c0, 0":"=r" (i));
108
109 i &= ~(1 << 12);
110
111 /* write back to control register */
112 asm volatile ("mcr p15, 0, %0, c1, c0, 0": :"r" (i));
113 }
114
mmu_disable_dcache()115 void mmu_disable_dcache()
116 {
117 register rt_uint32_t i;
118
119 /* read control register */
120 asm volatile ("mrc p15, 0, %0, c1, c0, 0":"=r" (i));
121
122 i &= ~(1 << 2);
123
124 /* write back to control register */
125 asm volatile ("mcr p15, 0, %0, c1, c0, 0": :"r" (i));
126 }
127
mmu_enable_alignfault()128 void mmu_enable_alignfault()
129 {
130 register rt_uint32_t i;
131
132 /* read control register */
133 asm volatile ("mrc p15, 0, %0, c1, c0, 0":"=r" (i));
134
135 i |= (1 << 1);
136
137 /* write back to control register */
138 asm volatile ("mcr p15, 0, %0, c1, c0, 0": :"r" (i));
139 }
140
mmu_disable_alignfault()141 void mmu_disable_alignfault()
142 {
143 register rt_uint32_t i;
144
145 /* read control register */
146 asm volatile ("mrc p15, 0, %0, c1, c0, 0":"=r" (i));
147
148 i &= ~(1 << 1);
149
150 /* write back to control register */
151 asm volatile ("mcr p15, 0, %0, c1, c0, 0": :"r" (i));
152 }
153
/*
 * Clean and invalidate one DCache line selected by set/index
 * (CP15 c7, c14, 2). 'index' encodes the set and way fields.
 */
void mmu_clean_invalidated_cache_index(int index)
{
    asm volatile ("mcr p15, 0, %0, c7, c14, 2": :"r" (index));
}
158
/* Invalidate the entire unified TLB (CP15 c8, c7, 0). */
void mmu_invalidate_tlb()
{
    asm volatile ("mcr p15, 0, %0, c8, c7, 0": :"r" (0));
}
163
/* Invalidate the entire instruction cache (CP15 c7, c5, 0). */
void mmu_invalidate_icache()
{
    asm volatile ("mcr p15, 0, %0, c7, c5, 0": :"r" (0));
}
168 #endif
169
170 #ifdef __CC_ARM
/* armcc: load the CP15 translation table base register (TTBR, c2) with 'i'. */
void mmu_setttbase(rt_uint32_t i)
{
    __asm volatile
    {
        mcr p15, 0, i, c2, c0, 0
    }
}
178
/* armcc: write the CP15 domain access-control register (c3) with 'i'. */
void mmu_set_domain(rt_uint32_t i)
{
    __asm volatile
    {
        mcr p15,0, i, c3, c0, 0
    }
}
186
/* armcc: enable the MMU by setting the M bit (bit 0) of the CP15 control register. */
void mmu_enable()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        orr value, value, #0x01
        mcr p15, 0, value, c1, c0, 0
    }
}
198
/* armcc: disable the MMU by clearing the M bit (bit 0) of the CP15 control register. */
void mmu_disable()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        bic value, value, #0x01
        mcr p15, 0, value, c1, c0, 0
    }
}
210
/* armcc: enable the ICache by setting the I bit (bit 12, #0x1000) of CP15 c1. */
void mmu_enable_icache()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        orr value, value, #0x1000
        mcr p15, 0, value, c1, c0, 0
    }
}
222
/* armcc: enable the DCache by setting the C bit (bit 2, #0x04) of CP15 c1. */
void mmu_enable_dcache()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        orr value, value, #0x04
        mcr p15, 0, value, c1, c0, 0
    }
}
234
/* armcc: disable the ICache by clearing the I bit (bit 12, #0x1000) of CP15 c1. */
void mmu_disable_icache()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        bic value, value, #0x1000
        mcr p15, 0, value, c1, c0, 0
    }
}
246
/* armcc: disable the DCache by clearing the C bit (bit 2, #0x04) of CP15 c1. */
void mmu_disable_dcache()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        bic value, value, #0x04
        mcr p15, 0, value, c1, c0, 0
    }
}
258
/* armcc: enable alignment-fault checking by setting the A bit (bit 1) of CP15 c1. */
void mmu_enable_alignfault()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        orr value, value, #0x02
        mcr p15, 0, value, c1, c0, 0
    }
}
270
/* armcc: disable alignment-fault checking by clearing the A bit (bit 1) of CP15 c1. */
void mmu_disable_alignfault()
{
    register rt_uint32_t value;

    __asm volatile
    {
        mrc p15, 0, value, c1, c0, 0
        bic value, value, #0x02
        mcr p15, 0, value, c1, c0, 0
    }
}
282
/*
 * armcc: clean and invalidate one DCache line selected by set/index
 * (CP15 c7, c14, 2). 'index' encodes the set and way fields.
 */
void mmu_clean_invalidated_cache_index(int index)
{
    __asm volatile
    {
        mcr p15, 0, index, c7, c14, 2
    }
}
290
/* armcc: invalidate the entire unified TLB (CP15 c8, c7, 0). */
void mmu_invalidate_tlb()
{
    register rt_uint32_t value;

    value = 0;
    __asm volatile
    {
        mcr p15, 0, value, c8, c7, 0
    }
}
301
/* armcc: invalidate the entire instruction cache (CP15 c7, c5, 0). */
void mmu_invalidate_icache()
{
    register rt_uint32_t value;

    value = 0;

    __asm volatile
    {
        mcr p15, 0, value, c7, c5, 0
    }
}
313 #endif
314
mmu_setmtt(int vaddrStart,int vaddrEnd,int paddrStart,int attr)315 void mmu_setmtt(int vaddrStart,int vaddrEnd,int paddrStart,int attr)
316 {
317 volatile rt_uint32_t *pTT;
318 volatile int i,nSec;
319 pTT=(rt_uint32_t *)_MMUTT_STARTADDRESS+(vaddrStart>>20);
320 nSec=(vaddrEnd>>20)-(vaddrStart>>20);
321 for(i=0;i<=nSec;i++)
322 {
323 *pTT = attr |(((paddrStart>>20)+i)<<20);
324 pTT++;
325 }
326 }
327
/*
 * Build the S3C24x0 flat translation table and turn on the MMU and caches.
 *
 * The sequence is order-critical: caches are disabled and cleaned before
 * the table is (re)written, the MMU is enabled before the DCache, and the
 * table itself lives in an uncached section (_MMUTT_STARTADDRESS).
 */
void rt_hw_mmu_init(void)
{
    int i,j;
    //========================== IMPORTANT NOTE =========================
    //The current stack and code area can't be re-mapped in this routine.
    //If you want memory map mapped freely, your own sophiscated mmu
    //initialization code is needed.
    //===================================================================

    mmu_disable_dcache();
    mmu_disable_icache();

    /* If write-back is used, the DCache must be cleaned so dirty lines
     * are not lost; iterate every set (64) and way-index word (8). */
    for(i=0;i<64;i++)
        for(j=0;j<8;j++)
            mmu_clean_invalidated_cache_index((i<<26)|(j<<5));

    mmu_invalidate_icache();

    /* To complete mmu_init() fast, the ICache may be turned on here. */
    mmu_enable_icache();

    mmu_disable();
    mmu_invalidate_tlb();

    /* mmu_setmtt(vaddrStart, vaddrEnd, paddrStart, attr) — 1MB sections.
     * Note: the second call below re-maps virtual 0x0-0x03f00000 onto
     * SDRAM at 0x30000000 (overriding the first call for that range). */
    mmu_setmtt(0x00000000,0x07f00000,0x00000000,RW_CNB);     //bank0
    mmu_setmtt(0x00000000,0x03f00000,(int)0x30000000,RW_CB); //bank0
    mmu_setmtt(0x04000000,0x07f00000,0,RW_NCNB);             //bank0
    mmu_setmtt(0x08000000,0x0ff00000,0x08000000,RW_CNB);     //bank1
    mmu_setmtt(0x10000000,0x17f00000,0x10000000,RW_NCNB);    //bank2
    mmu_setmtt(0x18000000,0x1ff00000,0x18000000,RW_NCNB);    //bank3
    //mmu_setmtt(0x20000000,0x27f00000,0x20000000,RW_CB);    //bank4
    /* bank4 is kept uncached because the DM9000 NIC sits there */
    mmu_setmtt(0x20000000,0x27f00000,0x20000000,RW_NCNB);    //bank4 for DM9000
    mmu_setmtt(0x28000000,0x2ff00000,0x28000000,RW_NCNB);    //bank5
    //30f00000->30100000, 31000000->30200000
    mmu_setmtt(0x30000000,0x30100000,0x30000000,RW_CB);      //bank6-1
    mmu_setmtt(0x30200000,0x33e00000,0x30200000,RW_CB);      //bank6-2

    /* the translation table itself lives here — must stay uncached */
    mmu_setmtt(0x33f00000,0x34000000,0x33f00000,RW_NCNB);    //bank6-3
    mmu_setmtt(0x38000000,0x3ff00000,0x38000000,RW_NCNB);    //bank7

    mmu_setmtt(0x40000000,0x47f00000,0x40000000,RW_NCNB);    //SFR
    mmu_setmtt(0x48000000,0x5af00000,0x48000000,RW_NCNB);    //SFR
    mmu_setmtt(0x5b000000,0x5b000000,0x5b000000,RW_NCNB);    //SFR
    mmu_setmtt(0x5b100000,0xfff00000,0x5b100000,RW_FAULT);   //not used
    mmu_setmtt(0x60000000,0x67f00000,0x60000000,RW_NCNB);    //SFR

    mmu_setttbase(_MMUTT_STARTADDRESS);

    /* DOMAIN1: no_access, DOMAIN0,2~15=client(AP is checked) */
    mmu_set_domain(0x55555550|DOMAIN1_ATTR|DOMAIN0_ATTR);

    mmu_enable_alignfault();

    mmu_enable();

    /* ICache enable */
    mmu_enable_icache();
    /* DCache should be turned on after mmu is turned on. */
    mmu_enable_dcache();
}
390
391