1 /*
2  * Copyright (c) 2008-2013 Travis Geiselbrecht
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining
5  * a copy of this software and associated documentation files
6  * (the "Software"), to deal in the Software without restriction,
7  * including without limitation the rights to use, copy, modify, merge,
8  * publish, distribute, sublicense, and/or sell copies of the Software,
9  * and to permit persons to whom the Software is furnished to do so,
10  * subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be
13  * included in all copies or substantial portions of the Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
17  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
18  * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
19  * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
20  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
21  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
22  */
23 #ifndef __ARCH_ARM_H
24 #define __ARCH_ARM_H
25 
26 #include <stdbool.h>
27 #include <sys/types.h>
28 #include <arch/arm/cores.h>
29 #include <compiler.h>
30 
31 /* due to the cp15 accessors below, you're gonna have a bad time if you try
32  * to compile in thumb mode. Either compile in ARM only or get a thumb2 capable cpu.
33 
34 #if defined(__thumb__) && !defined(__thumb2__)
35 #error this file unsupported in thumb1 mode
36 #endif
37 */
38 __BEGIN_CDECLS
39 
/*
 * Memory barrier macros.
 *
 * ARMv7 provides dedicated dsb/dmb/isb instructions. On ARMv6/v6M the
 * equivalent cp15 c7 operations are used instead (c7,c10,4 = data
 * synchronization barrier; c7,c5,4 = flush prefetch buffer, acting as an
 * instruction barrier); DMB is defined as a plain nop there.
 */
#if ARM_ISA_ARMV7
#define DSB __asm__ volatile("dsb" ::: "memory")
#define DMB __asm__ volatile("dmb" ::: "memory")
#define ISB __asm__ volatile("isb" ::: "memory")
#elif ARM_ISA_ARMV6 || ARM_ISA_ARMV6M
#define DSB __asm__ volatile("mcr p15, 0, %0, c7, c10, 4" :: "r" (0) : "memory")
#define ISB __asm__ volatile("mcr p15, 0, %0, c7, c5, 4" :: "r" (0) : "memory")
#define DMB __asm__ volatile("nop")
#else
#error unhandled arm isa
#endif
/* NOTE: unlike the barriers above, NOP carries its own trailing semicolon */
#define NOP __asm__ volatile("nop");
52 
/* Switch kernel stacks: the outgoing stack pointer is stored through
 * old_sp and execution resumes on new_sp (implementation is elsewhere). */
void arm_context_switch(vaddr_t *old_sp, vaddr_t new_sp);

/* Transfer control to a new image at physical address 'entry', passing
 * four arguments through. Never returns. */
void arm_chain_load(paddr_t entry, ulong arg0, ulong arg1, ulong arg2, ulong arg3) __NO_RETURN;
56 
/* Read the Current Program Status Register (mode bits, interrupt masks,
 * condition flags) via the mrs instruction. */
static inline uint32_t read_cpsr(void)
{
    uint32_t cpsr;

    __asm__ volatile("mrs   %0, cpsr" : "=r" (cpsr));
    return cpsr;
}
64 
/* CPSR fields: processor mode lives in the low 5 bits */
#define CPSR_MODE_MASK 0x1f
#define CPSR_MODE_USR 0x10      /* user */
#define CPSR_MODE_FIQ 0x11      /* fast interrupt */
#define CPSR_MODE_IRQ 0x12      /* interrupt */
#define CPSR_MODE_SVC 0x13      /* supervisor */
#define CPSR_MODE_MON 0x16      /* monitor (secure) */
#define CPSR_MODE_ABT 0x17      /* abort */
#define CPSR_MODE_UND 0x1b      /* undefined instruction */
#define CPSR_MODE_SYS 0x1f      /* system */
#define CPSR_THUMB    (1<<5)    /* T: Thumb execution state */
#define CPSR_FIQ_MASK (1<<6)    /* F: FIQs masked when set */
#define CPSR_IRQ_MASK (1<<7)    /* I: IRQs masked when set */
#define CPSR_ABORT    (1<<8)    /* A: asynchronous aborts masked when set */
#define CPSR_ENDIAN   (1<<9)    /* E: data access endianness */
79 
/*
 * Short exception frame as pushed by the exception entry code.
 * NOTE(review): field order is ABI with the assembly that builds this
 * frame — do not reorder. usp/ulr appear to be the banked user-mode
 * sp/lr; confirm against the entry glue.
 */
struct arm_iframe {
#if ARM_WITH_VFP
    uint32_t fpexc;     /* VFP exception register, saved when VFP is enabled */
#endif
    uint32_t usp;       /* user-mode sp (banked) */
    uint32_t ulr;       /* user-mode lr (banked) */
    uint32_t r0;        /* caller-saved argument/scratch registers */
    uint32_t r1;
    uint32_t r2;
    uint32_t r3;
    uint32_t r12;
    uint32_t lr;        /* lr at the point of the exception */
    uint32_t fp;        /* frame pointer (r11) */
    uint32_t pc;        /* return address */
    uint32_t spsr;      /* saved program status register */
};
96 
/*
 * Full register frame captured on a fault: the complete r0-r12 set plus
 * banked user sp/lr. NOTE(review): field order is ABI with the fault
 * entry assembly — do not reorder.
 */
struct arm_fault_frame {
#if ARM_WITH_VFP
    uint32_t fpexc;     /* VFP exception register, saved when VFP is enabled */
#endif
    uint32_t usp;       /* user-mode sp (banked) */
    uint32_t ulr;       /* user-mode lr (banked) */
    uint32_t r[13];     /* r0-r12 */
    uint32_t lr;        /* lr at the point of the fault */
    uint32_t fp;        /* frame pointer (r11) */
    uint32_t pc;        /* faulting address / return address */
    uint32_t spsr;      /* saved program status register */
};
109 
/* Banked sp (r13) and lr (r14) for each processor mode, mainly useful
 * for debug dumps. */
struct arm_mode_regs {
    uint32_t usr_r13, usr_r14;
    uint32_t fiq_r13, fiq_r14;
    uint32_t irq_r13, irq_r14;
    uint32_t svc_r13, svc_r14;
    uint32_t abt_r13, abt_r14;
    uint32_t und_r13, und_r14;
    uint32_t sys_r13, sys_r14;
};

/* Fill *regs with the banked r13/r14 of every processor mode. */
void arm_save_mode_regs(struct arm_mode_regs *regs);
121 
/*
 * Generate inline accessors for a 32-bit coprocessor register:
 *   arm_read_<reg>()             - mrc, marked volatile so it is not
 *                                  reordered or elided by the compiler
 *   arm_read_<reg>_relaxed()     - mrc without volatile; the compiler may
 *                                  CSE or reorder the read
 *   arm_write_<reg>(val)         - mcr followed by an ISB so the write
 *                                  takes effect before subsequent code
 *   arm_write_<reg>_relaxed(val) - mcr with no trailing barrier
 * (comments are kept outside the macro: a // comment before a backslash
 * continuation would swallow the continuation)
 */
#define GEN_CP_REG_FUNCS(cp, reg, op1, c1, c2, op2) \
static inline __ALWAYS_INLINE uint32_t arm_read_##reg(void) { \
    uint32_t val; \
    __asm__ volatile("mrc " #cp ", " #op1 ", %0, " #c1 ","  #c2 "," #op2 : "=r" (val)); \
    return val; \
} \
\
static inline __ALWAYS_INLINE uint32_t arm_read_##reg##_relaxed(void) { \
    uint32_t val; \
    __asm__("mrc " #cp ", " #op1 ", %0, " #c1 ","  #c2 "," #op2 : "=r" (val)); \
    return val; \
} \
\
static inline __ALWAYS_INLINE void arm_write_##reg(uint32_t val) { \
    __asm__ volatile("mcr " #cp ", " #op1 ", %0, " #c1 ","  #c2 "," #op2 :: "r" (val)); \
    ISB; \
} \
\
static inline __ALWAYS_INLINE void arm_write_##reg##_relaxed(uint32_t val) { \
    __asm__ volatile("mcr " #cp ", " #op1 ", %0, " #c1 ","  #c2 "," #op2 :: "r" (val)); \
}
143 
/*
 * Generate inline accessors for a 64-bit coprocessor register via
 * mrrc/mcrr, with the same naming/ordering scheme as GEN_CP_REG_FUNCS
 * (%0/%H0 are the low/high words of the 64-bit operand).
 *
 * FIX: the read accessors previously returned uint32_t, silently
 * truncating the upper 32 bits of every 64-bit register read; they now
 * return the full uint64_t (backward compatible: callers assigning to a
 * 32-bit variable get the same low word as before).
 */
#define GEN_CP_REG64_FUNCS(cp, reg, op1, crm) \
static inline __ALWAYS_INLINE uint64_t arm_read_##reg(void) { \
    uint64_t _val; \
    __asm__ volatile("mrrc " #cp ", " #op1 ", %0, %H0, " #crm : "=r" (_val)); \
    return _val; \
} \
\
static inline __ALWAYS_INLINE uint64_t arm_read_##reg##_relaxed(void) { \
    uint64_t _val; \
    __asm__("mrrc " #cp ", " #op1 ", %0, %H0, " #crm : "=r" (_val)); \
    return _val; \
} \
\
static inline __ALWAYS_INLINE void arm_write_##reg(uint64_t val) { \
    __asm__ volatile("mcrr " #cp ", " #op1 ", %0, %H0, " #crm :: "r" (val)); \
    ISB; \
} \
\
static inline __ALWAYS_INLINE void arm_write_##reg##_relaxed(uint64_t val) { \
    __asm__ volatile("mcrr " #cp ", " #op1 ", %0, %H0, " #crm :: "r" (val)); \
}
165 
/* Convenience wrappers binding the generators to a specific coprocessor:
 * p15 (system control), p14 (debug). */
#define GEN_CP15_REG_FUNCS(reg, op1, c1, c2, op2) \
    GEN_CP_REG_FUNCS(p15, reg, op1, c1, c2, op2)

#define GEN_CP14_REG_FUNCS(reg, op1, c1, c2, op2) \
    GEN_CP_REG_FUNCS(p14, reg, op1, c1, c2, op2)

#define GEN_CP15_REG64_FUNCS(reg, op1, crm) \
    GEN_CP_REG64_FUNCS(p15, reg, op1, crm)
174 
/* armv6+ control regs: system control, auxiliary control, coprocessor access */
GEN_CP15_REG_FUNCS(sctlr, 0, c1, c0, 0);
GEN_CP15_REG_FUNCS(actlr, 0, c1, c0, 1);
GEN_CP15_REG_FUNCS(cpacr, 0, c1, c0, 2);

/* translation table base, translation control, domain access control */
GEN_CP15_REG_FUNCS(ttbr, 0, c2, c0, 0);
GEN_CP15_REG_FUNCS(ttbr0, 0, c2, c0, 0);  /* same encoding as ttbr */
GEN_CP15_REG_FUNCS(ttbr1, 0, c2, c0, 1);
GEN_CP15_REG_FUNCS(ttbcr, 0, c2, c0, 2);
GEN_CP15_REG_FUNCS(dacr, 0, c3, c0, 0);
/* fault status/address registers (data/instruction) */
GEN_CP15_REG_FUNCS(dfsr, 0, c5, c0, 0);
GEN_CP15_REG_FUNCS(ifsr, 0, c5, c0, 1);
GEN_CP15_REG_FUNCS(dfar, 0, c6, c0, 0);
GEN_CP15_REG_FUNCS(wfar, 0, c6, c0, 1);
GEN_CP15_REG_FUNCS(ifar, 0, c6, c0, 2);

/* process/context/thread ID registers */
GEN_CP15_REG_FUNCS(fcseidr, 0, c13, c0, 0);
GEN_CP15_REG_FUNCS(contextidr, 0, c13, c0, 1);
GEN_CP15_REG_FUNCS(tpidrurw, 0, c13, c0, 2);   /* user read/write */
GEN_CP15_REG_FUNCS(tpidruro, 0, c13, c0, 3);   /* user read-only */
GEN_CP15_REG_FUNCS(tpidrprw, 0, c13, c0, 4);   /* privileged only */

/* armv7+: main/multiprocessor affinity ID, vector base, config base */
GEN_CP15_REG_FUNCS(midr, 0, c0, c0, 0);
GEN_CP15_REG_FUNCS(mpidr, 0, c0, c0, 5);
GEN_CP15_REG_FUNCS(vbar, 0, c12, c0, 0);
GEN_CP15_REG_FUNCS(cbar, 4, c15, c0, 0);

/* address translation operations (write to translate, read result from par) */
GEN_CP15_REG_FUNCS(ats1cpr, 0, c7, c8, 0);
GEN_CP15_REG_FUNCS(ats1cpw, 0, c7, c8, 1);
GEN_CP15_REG_FUNCS(ats1cur, 0, c7, c8, 2);
GEN_CP15_REG_FUNCS(ats1cuw, 0, c7, c8, 3);
GEN_CP15_REG_FUNCS(ats12nsopr, 0, c7, c8, 4);
GEN_CP15_REG_FUNCS(ats12nsopw, 0, c7, c8, 5);
GEN_CP15_REG_FUNCS(ats12nsour, 0, c7, c8, 6);
GEN_CP15_REG_FUNCS(ats12nsouw, 0, c7, c8, 7);
GEN_CP15_REG_FUNCS(par, 0, c7, c4, 0);

/* Branch predictor invalidate (all / by mva / all inner-shareable) */
GEN_CP15_REG_FUNCS(bpiall, 0, c7, c5, 6);
GEN_CP15_REG_FUNCS(bpimva, 0, c7, c5, 7);
GEN_CP15_REG_FUNCS(bpiallis, 0, c7, c1, 6);

/* tlb maintenance registers (is = inner shareable broadcast variants) */
GEN_CP15_REG_FUNCS(tlbiallis, 0, c8, c3, 0);
GEN_CP15_REG_FUNCS(tlbimvais, 0, c8, c3, 1);
GEN_CP15_REG_FUNCS(tlbiasidis, 0, c8, c3, 2);
GEN_CP15_REG_FUNCS(tlbimvaais, 0, c8, c3, 3);
GEN_CP15_REG_FUNCS(itlbiall, 0, c8, c5, 0);
GEN_CP15_REG_FUNCS(itlbimva, 0, c8, c5, 1);
GEN_CP15_REG_FUNCS(itlbiasid, 0, c8, c5, 2);
GEN_CP15_REG_FUNCS(dtlbiall, 0, c8, c6, 0);
GEN_CP15_REG_FUNCS(dtlbimva, 0, c8, c6, 1);
GEN_CP15_REG_FUNCS(dtlbiasid, 0, c8, c6, 2);
GEN_CP15_REG_FUNCS(tlbiall, 0, c8, c7, 0);
GEN_CP15_REG_FUNCS(tlbimva, 0, c8, c7, 1);
GEN_CP15_REG_FUNCS(tlbiasid, 0, c8, c7, 2);
GEN_CP15_REG_FUNCS(tlbimvaa, 0, c8, c7, 3);

/* L2 cache control (implementation defined, op1=1) */
GEN_CP15_REG_FUNCS(l2ctlr, 1, c9, c0, 2);
GEN_CP15_REG_FUNCS(l2ectlr, 1, c9, c0, 3);

/* debug registers (cp14) */
GEN_CP14_REG_FUNCS(dbddidr, 0, c0, c0, 0);
GEN_CP14_REG_FUNCS(dbgdrar, 0, c1, c0, 0);
GEN_CP14_REG_FUNCS(dbgdsar, 0, c2, c0, 0);
GEN_CP14_REG_FUNCS(dbgdscr, 0, c0, c1, 0);
GEN_CP14_REG_FUNCS(dbgdtrtxint, 0, c0, c5, 0);
GEN_CP14_REG_FUNCS(dbgdtrrxint, 0, c0, c5, 0); /* alias to previous */
GEN_CP14_REG_FUNCS(dbgwfar, 0, c0, c6, 0);
GEN_CP14_REG_FUNCS(dbgvcr, 0, c0, c7, 0);
GEN_CP14_REG_FUNCS(dbgecr, 0, c0, c9, 0);
GEN_CP14_REG_FUNCS(dbgdsccr, 0, c0, c10, 0);
GEN_CP14_REG_FUNCS(dbgdsmcr, 0, c0, c11, 0);
GEN_CP14_REG_FUNCS(dbgdtrrxext, 0, c0, c0, 2);
GEN_CP14_REG_FUNCS(dbgdscrext, 0, c0, c2, 2);
GEN_CP14_REG_FUNCS(dbgdtrtxext, 0, c0, c3, 2);
GEN_CP14_REG_FUNCS(dbgdrcr, 0, c0, c4, 2);
GEN_CP14_REG_FUNCS(dbgvr0, 0, c0, c0, 4);
GEN_CP14_REG_FUNCS(dbgvr1, 0, c0, c1, 4);
GEN_CP14_REG_FUNCS(dbgvr2, 0, c0, c2, 4);
GEN_CP14_REG_FUNCS(dbgbcr0, 0, c0, c0, 5);
GEN_CP14_REG_FUNCS(dbgbcr1, 0, c0, c1, 5);
GEN_CP14_REG_FUNCS(dbgbcr2, 0, c0, c2, 5);
GEN_CP14_REG_FUNCS(dbgwvr0, 0, c0, c0, 6);
GEN_CP14_REG_FUNCS(dbgwvr1, 0, c0, c1, 6);
GEN_CP14_REG_FUNCS(dbgwcr0, 0, c0, c0, 7);
GEN_CP14_REG_FUNCS(dbgwcr1, 0, c0, c1, 7);
GEN_CP14_REG_FUNCS(dbgoslar, 0, c1, c0, 4);
GEN_CP14_REG_FUNCS(dbgoslsr, 0, c1, c1, 4);
GEN_CP14_REG_FUNCS(dbgossrr, 0, c1, c2, 4);
GEN_CP14_REG_FUNCS(dbgprcr, 0, c1, c4, 4);
GEN_CP14_REG_FUNCS(dbgprsr, 0, c1, c5, 4);
GEN_CP14_REG_FUNCS(dbgclaimset, 0, c7, c8, 6);
GEN_CP14_REG_FUNCS(dbgclaimclr, 0, c7, c9, 6);
GEN_CP14_REG_FUNCS(dbgauthstatus, 0, c7, c14, 6);
GEN_CP14_REG_FUNCS(dbgdevid, 0, c7, c2, 7);
272 
/* fpu support routines (implemented elsewhere) */
/* Enable or disable FPU/VFP access. */
void arm_fpu_set_enable(bool enable);
#if ARM_WITH_VFP
/* Undefined-instruction handler hook for lazy FPU context handling. */
void arm_fpu_undefined_instruction(struct arm_iframe *frame);
struct thread;
/* Initialize a thread's FPU state. */
void arm_fpu_thread_initialize(struct thread *t);
/* Swap FPU context on a thread switch. */
void arm_fpu_thread_swap(struct thread *oldthread, struct thread *newthread);
#endif
281 
282 __END_CDECLS
283 
284 #endif
285