/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * lib_helpers.h: All library function prototypes and macros are defined in this
 * file.
 */

#ifndef __ARCH_LIB_HELPERS_H__
#define __ARCH_LIB_HELPERS_H__

#define EL0               0
#define EL1               1
#define EL2               2
#define EL3               3

#define CURRENT_EL_MASK   0x3
#define CURRENT_EL_SHIFT  2

#define SPSR_USE_L           0
#define SPSR_USE_H           1
#define SPSR_L_H_MASK        1
#define SPSR_M_SHIFT         4
#define SPSR_ERET_32         (1 << SPSR_M_SHIFT)
#define SPSR_ERET_64         (0 << SPSR_M_SHIFT)
#define SPSR_FIQ             (1 << 6)
#define SPSR_IRQ             (1 << 7)
#define SPSR_SERROR          (1 << 8)
#define SPSR_DEBUG           (1 << 9)
#define SPSR_EXCEPTION_MASK  (SPSR_FIQ | SPSR_IRQ | SPSR_SERROR | SPSR_DEBUG)
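
/*
 * Illustrative example (not from this file): an SPSR value for an ERET to
 * AArch64 EL2 using the EL2h stack, with all exceptions masked, could be
 * composed from the definitions above as
 *
 *	SPSR_EXCEPTION_MASK | SPSR_ERET_64 | SPSR_USE_H | (EL2 << CURRENT_EL_SHIFT)
 */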

#define SCR_NS		(1 << 0)	/* EL0/1 are non-secure		*/
#define SCR_IRQ		(1 << 1)	/* Take IRQs in EL3		*/
#define SCR_FIQ		(1 << 2)	/* Take FIQs in EL3		*/
#define SCR_EA		(1 << 3)	/* Take EA/SError in EL3	*/
#define SCR_SMD		(1 << 7)	/* Disable SMC instruction	*/
#define SCR_HCE		(1 << 8)	/* Enable HVC instruction	*/
#define SCR_SIF		(1 << 9)	/* Forbid insns from NS memory	*/
#define SCR_RW		(1 << 10)	/* Lower ELs are AArch64	*/
#define SCR_ST		(1 << 11)	/* Don't trap secure CNTPS	*/
#define SCR_TWI		(1 << 12)	/* Trap WFI to EL3		*/
#define SCR_TWE		(1 << 13)	/* Trap WFE to EL3		*/
#define SCR_TLOR	(1 << 14)	/* Trap LOR accesses to EL3	*/
#define SCR_TERR	(1 << 15)	/* Trap ERR accesses to EL3	*/
#define SCR_APK		(1 << 16)	/* Don't trap ptrauth keys	*/
#define SCR_API		(1 << 17)	/* Don't trap ptrauth insn	*/
#define SCR_EEL2	(1 << 18)	/* Enable secure EL2		*/
#define SCR_EASE	(1 << 19)	/* Sync EAs use SError vector	*/
#define SCR_NMEA	(1 << 20)	/* Disallow EL3 SError masking	*/
#define SCR_FIEN	(1 << 21)	/* Don't trap EXRPFG		*/
#define SCR_RES1	(3 << 4)
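
/*
 * Illustrative example (not from this file): EL3 firmware that hands off to a
 * non-secure AArch64 lower EL typically programs SCR_EL3 along the lines of
 *
 *	raw_write_scr_el3(SCR_RES1 | SCR_NS | SCR_RW);
 *
 * (raw_write_scr_el3() is generated further down in this header).
 */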

#define HCR_RW_SHIFT         31
#define HCR_LOWER_AARCH64    (1 << HCR_RW_SHIFT)
#define HCR_LOWER_AARCH32    (0 << HCR_RW_SHIFT)

#define SCTLR_M		(1 << 0)	/* MMU enable			*/
#define SCTLR_A		(1 << 1)	/* Alignment check enable	*/
#define SCTLR_C		(1 << 2)	/* Data/unified cache enable	*/
#define SCTLR_SA	(1 << 3)	/* Stack alignment check enable	*/
#define SCTLR_NAA	(1 << 6)	/* non-aligned access STA/LDR	*/
#define SCTLR_I		(1 << 12)	/* Instruction cache enable	*/
#define SCTLR_ENDB	(1 << 13)	/* Pointer auth (data B)	*/
#define SCTLR_WXN	(1 << 19)	/* Write permission implies XN	*/
#define SCTLR_IESB	(1 << 21)	/* Implicit error sync event	*/
#define SCTLR_EE	(1 << 25)	/* Exception endianness	(BE)	*/
#define SCTLR_ENDA	(1 << 27)	/* Pointer auth (data A)	*/
#define SCTLR_ENIB	(1 << 30)	/* Pointer auth (insn B)	*/
#define SCTLR_ENIA	(1 << 31)	/* Pointer auth (insn A)	*/
#define SCTLR_RES1           ((0x3 << 4) | (0x1 << 11) | (0x1 << 16) |	\
			      (0x1 << 18) | (0x3 << 22) | (0x3 << 28))
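
/*
 * Illustrative example (not from this file): when enabling the MMU and caches,
 * SCTLR is typically written with the RES1 bits kept set, e.g.
 *
 *	raw_write_sctlr_el3(SCTLR_RES1 | SCTLR_M | SCTLR_C | SCTLR_I);
 *
 * followed by an instruction barrier so the change takes effect.
 */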

#define CPTR_EL3_TCPAC_SHIFT	(31)
#define CPTR_EL3_TTA_SHIFT	(20)
#define CPTR_EL3_TFP_SHIFT	(10)
#define CPTR_EL3_TCPAC_DISABLE	(0 << CPTR_EL3_TCPAC_SHIFT)
#define CPTR_EL3_TCPAC_ENABLE	(1 << CPTR_EL3_TCPAC_SHIFT)
#define CPTR_EL3_TTA_DISABLE	(0 << CPTR_EL3_TTA_SHIFT)
#define CPTR_EL3_TTA_ENABLE	(1 << CPTR_EL3_TTA_SHIFT)
#define CPTR_EL3_TFP_DISABLE	(0 << CPTR_EL3_TFP_SHIFT)
#define CPTR_EL3_TFP_ENABLE	(1 << CPTR_EL3_TFP_SHIFT)

#define CPACR_TTA_SHIFT	(28)
#define CPACR_TTA_ENABLE	(1 << CPACR_TTA_SHIFT)
#define CPACR_TTA_DISABLE	(0 << CPACR_TTA_SHIFT)
#define CPACR_FPEN_SHIFT	(20)
/*
 * Per the Armv8-A spec, FPEN values 0b00 and 0b10 both trap FP register
 * accesses from EL0 and EL1.
 */
#define CPACR_TRAP_FP_EL0_EL1	(0 << CPACR_FPEN_SHIFT)
#define CPACR_TRAP_FP_EL0	(1 << CPACR_FPEN_SHIFT)
#define CPACR_TRAP_FP_DISABLE	(3 << CPACR_FPEN_SHIFT)
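
/*
 * Illustrative example (not from this file): to let EL0/EL1 use FP/SIMD
 * registers without trapping, firmware running at EL1 could write
 *
 *	raw_write_cpacr_el1(CPACR_TRAP_FP_DISABLE);
 *
 * (raw_write_cpacr_el1() is generated further down in this header).
 */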

#define DAIF_DBG_BIT      (1<<3)
#define DAIF_ABT_BIT      (1<<2)
#define DAIF_IRQ_BIT      (1<<1)
#define DAIF_FIQ_BIT      (1<<0)

#ifndef __ASSEMBLER__

#include <stdint.h>

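/*
 * MAKE_REGISTER_ACCESSORS(reg) generates raw_read_<reg>() and raw_write_<reg>()
 * helpers that access the named system register with MRS/MSR. The "memory"
 * clobber keeps the compiler from reordering the access across surrounding
 * memory operations.
 */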
#define MAKE_REGISTER_ACCESSORS(reg) \
	static inline uint64_t raw_read_##reg(void) \
	{ \
		uint64_t value; \
		__asm__ __volatile__("mrs %0, " #reg "\n\t" \
				     : "=r" (value) : : "memory"); \
		return value; \
	} \
	static inline void raw_write_##reg(uint64_t value) \
	{ \
		__asm__ __volatile__("msr " #reg ", %0\n\t" \
				     : : "r" (value) : "memory"); \
	}

/*
 * To allow easy access to the current EL's registers, we export the following
 * two functions for each register passed to the
 * MAKE_REGISTER_ACCESSORS_CURRENT_EL macro. Doing so eliminates, or at least
 * hides, repetitive branching on the current EL across the arm64 codebase.
 *
 * MAKE_REGISTER_ACCESSORS_CURRENT_EL is hooked into MAKE_REGISTER_ACCESSORS_EL123
 * so that current-EL accessors are automatically generated only for registers
 * which exist at EL1, EL2 and EL3.
 *
 * Note that we don't handle EL0 here, as most of the defined registers do not
 * have an EL0 variant (see MAKE_REGISTER_ACCESSORS_EL123).
 *
 * Important:
 *  - the target register should be specified without the '_elx' suffix
 *  - only registers which exist at EL1, EL2 and EL3 should be passed
 *    to the MAKE_REGISTER_ACCESSORS_CURRENT_EL macro
 */
#define MAKE_REGISTER_ACCESSORS_CURRENT_EL(reg) \
	static inline uint64_t raw_read_##reg(void) \
	{ \
		if (CONFIG_ARM64_CURRENT_EL == EL1) \
			return raw_read_##reg##_el1(); \
		else if (CONFIG_ARM64_CURRENT_EL == EL2) \
			return raw_read_##reg##_el2(); \
		return raw_read_##reg##_el3(); \
	} \
	static inline void raw_write_##reg(uint64_t value) \
	{ \
		if (CONFIG_ARM64_CURRENT_EL == EL1) \
			raw_write_##reg##_el1(value); \
		else if (CONFIG_ARM64_CURRENT_EL == EL2) \
			raw_write_##reg##_el2(value); \
		else \
			raw_write_##reg##_el3(value); \
	}

#define MAKE_REGISTER_ACCESSORS_EL123(reg) \
	MAKE_REGISTER_ACCESSORS(reg##_el1) \
	MAKE_REGISTER_ACCESSORS(reg##_el2) \
	MAKE_REGISTER_ACCESSORS(reg##_el3) \
	MAKE_REGISTER_ACCESSORS_CURRENT_EL(reg)
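
/*
 * Illustrative example (not from this file): MAKE_REGISTER_ACCESSORS_EL123(sctlr)
 * below generates raw_read_sctlr_el1()/raw_write_sctlr_el1() (and the _el2/_el3
 * variants), plus raw_read_sctlr()/raw_write_sctlr() which dispatch on
 * CONFIG_ARM64_CURRENT_EL, so callers can simply write
 *
 *	raw_write_sctlr(raw_read_sctlr() | SCTLR_I);
 *
 * regardless of the EL coreboot is built for.
 */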

/* Architectural register accessors */
MAKE_REGISTER_ACCESSORS_EL123(actlr)
MAKE_REGISTER_ACCESSORS_EL123(afsr0)
MAKE_REGISTER_ACCESSORS_EL123(afsr1)
MAKE_REGISTER_ACCESSORS(aidr_el1)
MAKE_REGISTER_ACCESSORS_EL123(amair)
MAKE_REGISTER_ACCESSORS(ccsidr_el1)
MAKE_REGISTER_ACCESSORS(clidr_el1)
MAKE_REGISTER_ACCESSORS(cntfrq_el0)
MAKE_REGISTER_ACCESSORS(cnthctl_el2)
MAKE_REGISTER_ACCESSORS(cnthp_ctl_el2)
MAKE_REGISTER_ACCESSORS(cnthp_cval_el2)
MAKE_REGISTER_ACCESSORS(cnthp_tval_el2)
MAKE_REGISTER_ACCESSORS(cntkctl_el1)
MAKE_REGISTER_ACCESSORS(cntp_ctl_el0)
MAKE_REGISTER_ACCESSORS(cntp_cval_el0)
MAKE_REGISTER_ACCESSORS(cntp_tval_el0)
MAKE_REGISTER_ACCESSORS(cntpct_el0)
MAKE_REGISTER_ACCESSORS(cntps_ctl_el1)
MAKE_REGISTER_ACCESSORS(cntps_cval_el1)
MAKE_REGISTER_ACCESSORS(cntps_tval_el1)
MAKE_REGISTER_ACCESSORS(cntv_ctl_el0)
MAKE_REGISTER_ACCESSORS(cntv_cval_el0)
MAKE_REGISTER_ACCESSORS(cntv_tval_el0)
MAKE_REGISTER_ACCESSORS(cntvct_el0)
MAKE_REGISTER_ACCESSORS(cntvoff_el2)
MAKE_REGISTER_ACCESSORS(contextidr_el1)
MAKE_REGISTER_ACCESSORS(cpacr_el1)
MAKE_REGISTER_ACCESSORS(cptr_el2)
MAKE_REGISTER_ACCESSORS(cptr_el3)
MAKE_REGISTER_ACCESSORS(csselr_el1)
MAKE_REGISTER_ACCESSORS(ctr_el0)
MAKE_REGISTER_ACCESSORS(currentel)
MAKE_REGISTER_ACCESSORS(daif)
MAKE_REGISTER_ACCESSORS(dczid_el0)
MAKE_REGISTER_ACCESSORS_EL123(elr)
MAKE_REGISTER_ACCESSORS_EL123(esr)
MAKE_REGISTER_ACCESSORS_EL123(far)
MAKE_REGISTER_ACCESSORS(fpcr)
MAKE_REGISTER_ACCESSORS(fpsr)
MAKE_REGISTER_ACCESSORS(hacr_el2)
MAKE_REGISTER_ACCESSORS(hcr_el2)
MAKE_REGISTER_ACCESSORS(hpfar_el2)
MAKE_REGISTER_ACCESSORS(hstr_el2)
MAKE_REGISTER_ACCESSORS(id_aa64mmfr2_el1)
MAKE_REGISTER_ACCESSORS(isr_el1)
MAKE_REGISTER_ACCESSORS_EL123(mair)
MAKE_REGISTER_ACCESSORS_EL123(mdcr)
MAKE_REGISTER_ACCESSORS(mdscr)
MAKE_REGISTER_ACCESSORS(midr_el1)
MAKE_REGISTER_ACCESSORS(mpidr_el1)
MAKE_REGISTER_ACCESSORS(nzcv)
MAKE_REGISTER_ACCESSORS(oslar_el1)
MAKE_REGISTER_ACCESSORS(oslsr_el1)
MAKE_REGISTER_ACCESSORS(par_el1)
MAKE_REGISTER_ACCESSORS(revdir_el1)
MAKE_REGISTER_ACCESSORS_EL123(rmr)
MAKE_REGISTER_ACCESSORS_EL123(rvbar)
MAKE_REGISTER_ACCESSORS(scr_el3)
MAKE_REGISTER_ACCESSORS_EL123(sctlr)
MAKE_REGISTER_ACCESSORS(sp_el0)
MAKE_REGISTER_ACCESSORS(sp_el1)
MAKE_REGISTER_ACCESSORS(sp_el2)
MAKE_REGISTER_ACCESSORS(spsel)
MAKE_REGISTER_ACCESSORS_EL123(spsr)
MAKE_REGISTER_ACCESSORS(spsr_abt)
MAKE_REGISTER_ACCESSORS(spsr_fiq)
MAKE_REGISTER_ACCESSORS(spsr_irq)
MAKE_REGISTER_ACCESSORS(spsr_und)
MAKE_REGISTER_ACCESSORS_EL123(tcr)
MAKE_REGISTER_ACCESSORS_EL123(tpidr)
MAKE_REGISTER_ACCESSORS_EL123(ttbr0)
MAKE_REGISTER_ACCESSORS(ttbr1_el1)
MAKE_REGISTER_ACCESSORS_EL123(vbar)
MAKE_REGISTER_ACCESSORS(vmpidr_el2)
MAKE_REGISTER_ACCESSORS(vpidr_el2)
MAKE_REGISTER_ACCESSORS(vtcr_el2)
MAKE_REGISTER_ACCESSORS(vttbr_el2)

/* Special DAIF accessor functions */
static inline void enable_debug_exceptions(void)
{
	__asm__ __volatile__("msr DAIFClr, %0\n\t"
			     : : "i" (DAIF_DBG_BIT)  : "memory");
}

static inline void enable_serror_exceptions(void)
{
	__asm__ __volatile__("msr DAIFClr, %0\n\t"
			     : : "i" (DAIF_ABT_BIT)  : "memory");
}

static inline void enable_irq(void)
{
	__asm__ __volatile__("msr DAIFClr, %0\n\t"
			     : : "i" (DAIF_IRQ_BIT)  : "memory");
}

static inline void enable_fiq(void)
{
	__asm__ __volatile__("msr DAIFClr, %0\n\t"
			     : : "i" (DAIF_FIQ_BIT)  : "memory");
}

static inline void disable_debug_exceptions(void)
{
	__asm__ __volatile__("msr DAIFSet, %0\n\t"
			     : : "i" (DAIF_DBG_BIT)  : "memory");
}

static inline void disable_serror_exceptions(void)
{
	__asm__ __volatile__("msr DAIFSet, %0\n\t"
			     : : "i" (DAIF_ABT_BIT)  : "memory");
}

static inline void disable_irq(void)
{
	__asm__ __volatile__("msr DAIFSet, %0\n\t"
			     : : "i" (DAIF_IRQ_BIT)  : "memory");
}

static inline void disable_fiq(void)
{
	__asm__ __volatile__("msr DAIFSet, %0\n\t"
			     : : "i" (DAIF_FIQ_BIT)  : "memory");
}
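
/*
 * Illustrative example (not from this file): these helpers can bracket a
 * section that must not be interrupted, e.g.
 *
 *	disable_irq();
 *	... touch state shared with an interrupt handler ...
 *	enable_irq();
 *
 * assuming IRQs were enabled on entry.
 */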

/* Cache maintenance system instructions */
static inline void dccisw(uint64_t cisw)
{
	__asm__ __volatile__("dc cisw, %0\n\t" : : "r" (cisw) : "memory");
}

static inline void dccivac(uint64_t civac)
{
	__asm__ __volatile__("dc civac, %0\n\t" : : "r" (civac) : "memory");
}

static inline void dccsw(uint64_t csw)
{
	__asm__ __volatile__("dc csw, %0\n\t" : : "r" (csw) : "memory");
}

static inline void dccvac(uint64_t cvac)
{
	__asm__ __volatile__("dc cvac, %0\n\t" : : "r" (cvac) : "memory");
}

static inline void dccvau(uint64_t cvau)
{
	__asm__ __volatile__("dc cvau, %0\n\t" : : "r" (cvau) : "memory");
}

static inline void dcisw(uint64_t isw)
{
	__asm__ __volatile__("dc isw, %0\n\t" : : "r" (isw) : "memory");
}

static inline void dcivac(uint64_t ivac)
{
	__asm__ __volatile__("dc ivac, %0\n\t" : : "r" (ivac) : "memory");
}

static inline void dczva(uint64_t zva)
{
	__asm__ __volatile__("dc zva, %0\n\t" : : "r" (zva) : "memory");
}

static inline void iciallu(void)
{
	__asm__ __volatile__("ic iallu\n\t" : : : "memory");
}

static inline void icialluis(void)
{
	__asm__ __volatile__("ic ialluis\n\t" : : : "memory");
}

static inline void icivau(uint64_t ivau)
{
	__asm__ __volatile__("ic ivau, %0\n\t" : : "r" (ivau) : "memory");
}
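
/*
 * Illustrative example (not from this file): a clean-to-PoC loop over a buffer
 * by VA, with the minimum D-cache line size taken from CTR_EL0.DminLine
 * (bits [19:16], log2 of the line size in words):
 *
 *	uint64_t line = 4 << ((raw_read_ctr_el0() >> 16) & 0xf);
 *	for (uint64_t addr = start & ~(line - 1); addr < end; addr += line)
 *		dccvac(addr);
 *
 * followed by a dsb barrier (provided elsewhere) to complete the maintenance.
 */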

/* TLB maintenance instructions */
static inline void tlbiall_el1(void)
{
	__asm__ __volatile__("tlbi alle1\n\t" : : : "memory");
}

static inline void tlbiall_el2(void)
{
	__asm__ __volatile__("tlbi alle2\n\t" : : : "memory");
}

static inline void tlbiall_el3(void)
{
	__asm__ __volatile__("tlbi alle3\n\t" : : : "memory");
}

static inline void tlbiall(void)
{
	if (CONFIG_ARM64_CURRENT_EL == EL1)
		tlbiall_el1();
	else if (CONFIG_ARM64_CURRENT_EL == EL2)
		tlbiall_el2();
	else
		tlbiall_el3();
}

static inline void tlbiallis_el1(void)
{
	__asm__ __volatile__("tlbi alle1is\n\t" : : : "memory");
}

static inline void tlbiallis_el2(void)
{
	__asm__ __volatile__("tlbi alle2is\n\t" : : : "memory");
}

static inline void tlbiallis_el3(void)
{
	__asm__ __volatile__("tlbi alle3is\n\t" : : : "memory");
}

static inline void tlbivaa_el1(uint64_t va)
{
	__asm__ __volatile__("tlbi vaae1, %0\n\t" : : "r" (va) : "memory");
}
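
/*
 * Illustrative example (not from this file): after modifying the translation
 * tables for the current EL, stale entries are typically discarded with
 *
 *	tlbiall();
 *
 * bracketed by dsb/isb barriers (provided elsewhere) so the table writes are
 * visible before the invalidate and the invalidate completes before reuse.
 */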

#endif /* __ASSEMBLER__ */

#endif /* __ARCH_LIB_HELPERS_H__ */