/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_HELPERS_H
#define ARCH_HELPERS_H

#include <cdefs.h>
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#include <arch.h>

/**********************************************************************
 * Macros which create inline functions to read or write CPU system
 * registers
 *********************************************************************/

#define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name) \
static inline u_register_t read_ ## _name(void) \
{ \
	u_register_t v; \
	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v)); \
	return v; \
}

#define _DEFINE_SYSREG_READ_FUNC_NV(_name, _reg_name) \
static inline u_register_t read_ ## _name(void) \
{ \
	u_register_t v; \
	__asm__ ("mrs %0, " #_reg_name : "=r" (v)); \
	return v; \
}

#define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name) \
static inline void write_ ## _name(u_register_t v) \
{ \
	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v)); \
}

#define SYSREG_WRITE_CONST(reg_name, v) \
	__asm__ volatile ("msr " #reg_name ", %0" : : "i" (v))
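
/*
 * Illustrative expansion (a sketch, not part of the build): for example,
 * _DEFINE_SYSREG_READ_FUNC(sctlr_el1, sctlr_el1) expands to roughly:
 *
 *   static inline u_register_t read_sctlr_el1(void)
 *   {
 *       u_register_t v;
 *       __asm__ volatile ("mrs %0, sctlr_el1" : "=r" (v));
 *       return v;
 *   }
 *
 * and _DEFINE_SYSREG_WRITE_FUNC(sctlr_el1, sctlr_el1) produces the matching
 * write_sctlr_el1() that issues the corresponding "msr" instruction.
 */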

/* Define read function for system register */
#define DEFINE_SYSREG_READ_FUNC(_name) \
	_DEFINE_SYSREG_READ_FUNC(_name, _name)

/* Define read & write function for system register */
#define DEFINE_SYSREG_RW_FUNCS(_name) \
	_DEFINE_SYSREG_READ_FUNC(_name, _name) \
	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)

/* Define read & write function for renamed system register */
#define DEFINE_RENAME_SYSREG_RW_FUNCS(_name, _reg_name) \
	_DEFINE_SYSREG_READ_FUNC(_name, _reg_name) \
	_DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name)

/* Define read function for renamed system register */
#define DEFINE_RENAME_SYSREG_READ_FUNC(_name, _reg_name) \
	_DEFINE_SYSREG_READ_FUNC(_name, _reg_name)

/* Define write function for renamed system register */
#define DEFINE_RENAME_SYSREG_WRITE_FUNC(_name, _reg_name) \
	_DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name)

/* Define read function for ID register (w/o volatile qualifier) */
#define DEFINE_IDREG_READ_FUNC(_name) \
	_DEFINE_SYSREG_READ_FUNC_NV(_name, _name)

/* Define read function for renamed ID register (w/o volatile qualifier) */
#define DEFINE_RENAME_IDREG_READ_FUNC(_name, _reg_name) \
	_DEFINE_SYSREG_READ_FUNC_NV(_name, _reg_name)
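
/*
 * Illustrative note (informal, not normative): the RENAME variants are used
 * when the assembler spelling of a register differs from the desired C
 * accessor name, typically an encoded name provided by arch.h. For example,
 * DEFINE_RENAME_SYSREG_RW_FUNCS(icc_sre_el3, ICC_SRE_EL3) still generates
 * read_icc_sre_el3()/write_icc_sre_el3(), but the inline assembly uses the
 * ICC_SRE_EL3 spelling. The IDREG variants omit the volatile qualifier so
 * the compiler is free to combine repeated reads of read-only ID registers.
 */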

/**********************************************************************
 * Macros to create inline functions for system instructions
 *********************************************************************/

/* Define function for simple system instruction */
#define DEFINE_SYSOP_FUNC(_op) \
static inline void _op(void) \
{ \
	__asm__ (#_op); \
}

/* Define function for system instruction with register parameter */
#define DEFINE_SYSOP_PARAM_FUNC(_op) \
static inline void _op(uint64_t v) \
{ \
	__asm__ (#_op " %0" : : "r" (v)); \
}

/* Define function for system instruction with type specifier */
#define DEFINE_SYSOP_TYPE_FUNC(_op, _type) \
static inline void _op ## _type(void) \
{ \
	__asm__ (#_op " " #_type : : : "memory"); \
}

/* Define function for system instruction with type specifier and register parameter */
#define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type) \
static inline void _op ## _type(uint64_t v) \
{ \
	__asm__ (#_op " " #_type ", %0" : : "r" (v)); \
}
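
/*
 * Usage sketch (illustrative only): DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
 * generates a dsbish() helper that emits "dsb ish", and
 * DEFINE_SYSOP_TYPE_PARAM_FUNC(tlbi, vae2is) generates tlbivae2is(v),
 * which emits "tlbi vae2is, %0" with v passed in a general-purpose register.
 */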

/*******************************************************************************
 * TLB maintenance accessor prototypes
 ******************************************************************************/

#if ERRATA_A57_813419 || ERRATA_A76_1286807
/*
 * Define function for TLBI instruction with type specifier that implements
 * the workaround for erratum 813419 of Cortex-A57 or erratum 1286807 of
 * Cortex-A76.
 */
#define DEFINE_TLBIOP_ERRATA_TYPE_FUNC(_type) \
static inline void tlbi ## _type(void) \
{ \
	__asm__("tlbi " #_type "\n" \
		"dsb ish\n" \
		"tlbi " #_type); \
}

/*
 * Define function for TLBI instruction with register parameter that implements
 * the workaround for erratum 813419 of Cortex-A57 or erratum 1286807 of
 * Cortex-A76.
 */
#define DEFINE_TLBIOP_ERRATA_TYPE_PARAM_FUNC(_type) \
static inline void tlbi ## _type(uint64_t v) \
{ \
	__asm__("tlbi " #_type ", %0\n" \
		"dsb ish\n" \
		"tlbi " #_type ", %0" : : "r" (v)); \
}
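
/*
 * Illustrative expansion (not part of the build): with the erratum
 * workaround enabled, DEFINE_TLBIOP_ERRATA_TYPE_FUNC(alle3) produces a
 * tlbialle3() that issues the TLBI twice with a DSB ISH in between, i.e.
 * roughly:
 *
 *   __asm__("tlbi alle3\n"
 *           "dsb ish\n"
 *           "tlbi alle3");
 *
 * which is the repeated-TLBI sequence the affected cores require.
 */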
#endif /* ERRATA_A57_813419 || ERRATA_A76_1286807 */

#if ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319
/*
 * Define function for DC instruction with register parameter that implements
 * the workaround for errata 819472, 824069 and 827319 of Cortex-A53.
 */
#define DEFINE_DCOP_ERRATA_A53_TYPE_PARAM_FUNC(_name, _type) \
static inline void dc ## _name(uint64_t v) \
{ \
	__asm__("dc " #_type ", %0" : : "r" (v)); \
}
#endif /* ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319 */
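
/*
 * Illustrative example (a sketch, not normative): when any of the A53
 * errata above are enabled, DEFINE_DCOP_ERRATA_A53_TYPE_PARAM_FUNC(cvac, civac)
 * makes dccvac(v) emit "dc civac, %0" instead of "dc cvac, %0", i.e. a
 * clean-by-VA request is upgraded to clean-and-invalidate, which is the
 * documented workaround for those errata.
 */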

#if ERRATA_A57_813419
DEFINE_SYSOP_TYPE_FUNC(tlbi, alle1)
DEFINE_SYSOP_TYPE_FUNC(tlbi, alle1is)
DEFINE_SYSOP_TYPE_FUNC(tlbi, alle2)
DEFINE_SYSOP_TYPE_FUNC(tlbi, alle2is)
DEFINE_TLBIOP_ERRATA_TYPE_FUNC(alle3)
DEFINE_TLBIOP_ERRATA_TYPE_FUNC(alle3is)
DEFINE_SYSOP_TYPE_FUNC(tlbi, vmalle1)
#elif ERRATA_A76_1286807
DEFINE_TLBIOP_ERRATA_TYPE_FUNC(alle1)
DEFINE_TLBIOP_ERRATA_TYPE_FUNC(alle1is)
DEFINE_TLBIOP_ERRATA_TYPE_FUNC(alle2)
DEFINE_TLBIOP_ERRATA_TYPE_FUNC(alle2is)
DEFINE_TLBIOP_ERRATA_TYPE_FUNC(alle3)
DEFINE_TLBIOP_ERRATA_TYPE_FUNC(alle3is)
DEFINE_TLBIOP_ERRATA_TYPE_FUNC(vmalle1)
#else
DEFINE_SYSOP_TYPE_FUNC(tlbi, alle1)
DEFINE_SYSOP_TYPE_FUNC(tlbi, alle1is)
DEFINE_SYSOP_TYPE_FUNC(tlbi, alle2)
DEFINE_SYSOP_TYPE_FUNC(tlbi, alle2is)
DEFINE_SYSOP_TYPE_FUNC(tlbi, alle3)
DEFINE_SYSOP_TYPE_FUNC(tlbi, alle3is)
DEFINE_SYSOP_TYPE_FUNC(tlbi, vmalle1)
#endif

#if ERRATA_A57_813419
DEFINE_SYSOP_TYPE_PARAM_FUNC(tlbi, vaae1is)
DEFINE_SYSOP_TYPE_PARAM_FUNC(tlbi, vaale1is)
DEFINE_SYSOP_TYPE_PARAM_FUNC(tlbi, vae2is)
DEFINE_SYSOP_TYPE_PARAM_FUNC(tlbi, vale2is)
DEFINE_TLBIOP_ERRATA_TYPE_PARAM_FUNC(vae3is)
DEFINE_TLBIOP_ERRATA_TYPE_PARAM_FUNC(vale3is)
#elif ERRATA_A76_1286807
DEFINE_TLBIOP_ERRATA_TYPE_PARAM_FUNC(vaae1is)
DEFINE_TLBIOP_ERRATA_TYPE_PARAM_FUNC(vaale1is)
DEFINE_TLBIOP_ERRATA_TYPE_PARAM_FUNC(vae2is)
DEFINE_TLBIOP_ERRATA_TYPE_PARAM_FUNC(vale2is)
DEFINE_TLBIOP_ERRATA_TYPE_PARAM_FUNC(vae3is)
DEFINE_TLBIOP_ERRATA_TYPE_PARAM_FUNC(vale3is)
#else
DEFINE_SYSOP_TYPE_PARAM_FUNC(tlbi, vaae1is)
DEFINE_SYSOP_TYPE_PARAM_FUNC(tlbi, vaale1is)
DEFINE_SYSOP_TYPE_PARAM_FUNC(tlbi, vae2is)
DEFINE_SYSOP_TYPE_PARAM_FUNC(tlbi, vale2is)
DEFINE_SYSOP_TYPE_PARAM_FUNC(tlbi, vae3is)
DEFINE_SYSOP_TYPE_PARAM_FUNC(tlbi, vale3is)
#endif

/*******************************************************************************
 * Cache maintenance accessor prototypes
 ******************************************************************************/
DEFINE_SYSOP_TYPE_PARAM_FUNC(dc, isw)
DEFINE_SYSOP_TYPE_PARAM_FUNC(dc, cisw)
#if ERRATA_A53_827319
DEFINE_DCOP_ERRATA_A53_TYPE_PARAM_FUNC(csw, cisw)
#else
DEFINE_SYSOP_TYPE_PARAM_FUNC(dc, csw)
#endif
#if ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319
DEFINE_DCOP_ERRATA_A53_TYPE_PARAM_FUNC(cvac, civac)
#else
DEFINE_SYSOP_TYPE_PARAM_FUNC(dc, cvac)
#endif
DEFINE_SYSOP_TYPE_PARAM_FUNC(dc, ivac)
DEFINE_SYSOP_TYPE_PARAM_FUNC(dc, civac)
#if ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319
DEFINE_DCOP_ERRATA_A53_TYPE_PARAM_FUNC(cvau, civac)
#else
DEFINE_SYSOP_TYPE_PARAM_FUNC(dc, cvau)
#endif
DEFINE_SYSOP_TYPE_PARAM_FUNC(dc, zva)
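
/*
 * Usage sketch (illustrative only, "line_addr" is a hypothetical cache-line
 * aligned address): cleaning and invalidating a single line by VA and
 * waiting for completion would look roughly like:
 *
 *   dccivac((uint64_t)line_addr);
 *   dsbish();
 *
 * For whole address ranges, the flush_dcache_range()/clean_dcache_range()
 * helpers declared further down iterate over the cache lines for you.
 */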

/*******************************************************************************
 * Address translation accessor prototypes
 ******************************************************************************/
DEFINE_SYSOP_TYPE_PARAM_FUNC(at, s12e1r)
DEFINE_SYSOP_TYPE_PARAM_FUNC(at, s12e1w)
DEFINE_SYSOP_TYPE_PARAM_FUNC(at, s12e0r)
DEFINE_SYSOP_TYPE_PARAM_FUNC(at, s12e0w)
DEFINE_SYSOP_TYPE_PARAM_FUNC(at, s1e1r)
DEFINE_SYSOP_TYPE_PARAM_FUNC(at, s1e2r)
DEFINE_SYSOP_TYPE_PARAM_FUNC(at, s1e3r)
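
/*
 * Usage sketch (illustrative only, "va" is a hypothetical virtual address):
 * a stage-1 EL1 read translation and a result check would look roughly like:
 *
 *   ats1e1r(va);
 *   isb();
 *   if ((read_par_el1() & 1ULL) == 0ULL) {
 *       ... PAR_EL1.F is clear, so the translation succeeded and the
 *       ... physical address can be extracted from PAR_EL1.
 *   }
 *
 * read_par_el1() and isb() are generated further down in this header.
 */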

/*******************************************************************************
 * Strip Pointer Authentication Code
 ******************************************************************************/
DEFINE_SYSOP_PARAM_FUNC(xpaci)

void flush_dcache_range(uintptr_t addr, size_t size);
void flush_dcache_to_popa_range(uintptr_t addr, size_t size);
void flush_dcache_to_popa_range_mte2(uintptr_t addr, size_t size);
void clean_dcache_range(uintptr_t addr, size_t size);
void inv_dcache_range(uintptr_t addr, size_t size);
bool is_dcache_enabled(void);

void dcsw_op_louis(u_register_t op_type);
void dcsw_op_all(u_register_t op_type);

void disable_mmu_el1(void);
void disable_mmu_el3(void);
void disable_mpu_el2(void);
void disable_mmu_icache_el1(void);
void disable_mmu_icache_el3(void);
void disable_mpu_icache_el2(void);

/*******************************************************************************
 * Misc. accessor prototypes
 ******************************************************************************/

#define write_daifclr(val)	SYSREG_WRITE_CONST(daifclr, val)
#define write_daifset(val)	SYSREG_WRITE_CONST(daifset, val)

DEFINE_SYSREG_RW_FUNCS(par_el1)
DEFINE_IDREG_READ_FUNC(id_pfr1_el1)
DEFINE_IDREG_READ_FUNC(id_aa64isar0_el1)
DEFINE_IDREG_READ_FUNC(id_aa64isar1_el1)
DEFINE_RENAME_IDREG_READ_FUNC(id_aa64isar2_el1, ID_AA64ISAR2_EL1)
DEFINE_IDREG_READ_FUNC(id_aa64pfr0_el1)
DEFINE_IDREG_READ_FUNC(id_aa64pfr1_el1)
DEFINE_RENAME_IDREG_READ_FUNC(id_aa64pfr2_el1, ID_AA64PFR2_EL1)
DEFINE_IDREG_READ_FUNC(id_aa64dfr0_el1)
DEFINE_IDREG_READ_FUNC(id_aa64dfr1_el1)
DEFINE_IDREG_READ_FUNC(id_afr0_el1)
DEFINE_SYSREG_READ_FUNC(CurrentEl)
DEFINE_SYSREG_READ_FUNC(ctr_el0)
DEFINE_SYSREG_RW_FUNCS(daif)
DEFINE_SYSREG_RW_FUNCS(spsr_el1)
DEFINE_SYSREG_RW_FUNCS(spsr_el2)
DEFINE_SYSREG_RW_FUNCS(spsr_el3)
DEFINE_SYSREG_RW_FUNCS(elr_el1)
DEFINE_SYSREG_RW_FUNCS(elr_el2)
DEFINE_SYSREG_RW_FUNCS(elr_el3)
DEFINE_SYSREG_RW_FUNCS(mdccsr_el0)
DEFINE_SYSREG_RW_FUNCS(mdccint_el1)
DEFINE_SYSREG_RW_FUNCS(dbgdtrrx_el0)
DEFINE_SYSREG_RW_FUNCS(dbgdtrtx_el0)
DEFINE_SYSREG_RW_FUNCS(sp_el1)
DEFINE_SYSREG_RW_FUNCS(sp_el2)

DEFINE_SYSOP_FUNC(wfi)
DEFINE_SYSOP_FUNC(wfe)
DEFINE_SYSOP_FUNC(sev)
DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, st)
DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
DEFINE_SYSOP_TYPE_FUNC(dsb, osh)
DEFINE_SYSOP_TYPE_FUNC(dsb, nsh)
DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
DEFINE_SYSOP_TYPE_FUNC(dsb, oshst)
DEFINE_SYSOP_TYPE_FUNC(dmb, oshld)
DEFINE_SYSOP_TYPE_FUNC(dmb, oshst)
DEFINE_SYSOP_TYPE_FUNC(dmb, osh)
DEFINE_SYSOP_TYPE_FUNC(dmb, nshld)
DEFINE_SYSOP_TYPE_FUNC(dmb, nshst)
DEFINE_SYSOP_TYPE_FUNC(dmb, nsh)
DEFINE_SYSOP_TYPE_FUNC(dmb, ishld)
DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
DEFINE_SYSOP_FUNC(isb)

static inline void enable_irq(void)
{
	/*
	 * The compiler memory barrier prevents the compiler from moving
	 * non-volatile memory accesses past the write to the register.
	 *
	 * This could happen if some initialization code issues non-volatile
	 * accesses to an area used by an interrupt handler, on the assumption
	 * that this is safe because interrupts are disabled at that point
	 * (according to program order). However, non-volatile accesses are
	 * not necessarily ordered in program order relative to volatile
	 * inline assembly statements (and volatile accesses).
	 */
	COMPILER_BARRIER();
	write_daifclr(DAIF_IRQ_BIT);
	isb();
}

static inline void enable_fiq(void)
{
	COMPILER_BARRIER();
	write_daifclr(DAIF_FIQ_BIT);
	isb();
}

static inline void enable_serror(void)
{
	COMPILER_BARRIER();
	write_daifclr(DAIF_ABT_BIT);
	isb();
}

static inline void enable_debug_exceptions(void)
{
	COMPILER_BARRIER();
	write_daifclr(DAIF_DBG_BIT);
	isb();
}

static inline void disable_irq(void)
{
	COMPILER_BARRIER();
	write_daifset(DAIF_IRQ_BIT);
	isb();
}

static inline void disable_fiq(void)
{
	COMPILER_BARRIER();
	write_daifset(DAIF_FIQ_BIT);
	isb();
}

static inline void disable_serror(void)
{
	COMPILER_BARRIER();
	write_daifset(DAIF_ABT_BIT);
	isb();
}

static inline void disable_debug_exceptions(void)
{
	COMPILER_BARRIER();
	write_daifset(DAIF_DBG_BIT);
	isb();
}

void __dead2 smc(uint64_t x0, uint64_t x1, uint64_t x2, uint64_t x3,
		 uint64_t x4, uint64_t x5, uint64_t x6, uint64_t x7);

/*******************************************************************************
 * System register accessor prototypes
 ******************************************************************************/
DEFINE_IDREG_READ_FUNC(midr_el1)
DEFINE_SYSREG_READ_FUNC(mpidr_el1)
DEFINE_IDREG_READ_FUNC(id_aa64mmfr0_el1)
DEFINE_IDREG_READ_FUNC(id_aa64mmfr1_el1)

DEFINE_SYSREG_RW_FUNCS(scr_el3)
DEFINE_SYSREG_RW_FUNCS(hcr_el2)

DEFINE_SYSREG_RW_FUNCS(vbar_el1)
DEFINE_SYSREG_RW_FUNCS(vbar_el2)
DEFINE_SYSREG_RW_FUNCS(vbar_el3)

DEFINE_SYSREG_RW_FUNCS(sctlr_el1)
DEFINE_SYSREG_RW_FUNCS(sctlr_el2)
DEFINE_SYSREG_RW_FUNCS(sctlr_el3)

DEFINE_SYSREG_RW_FUNCS(actlr_el1)
DEFINE_SYSREG_RW_FUNCS(actlr_el2)
DEFINE_SYSREG_RW_FUNCS(actlr_el3)

DEFINE_SYSREG_RW_FUNCS(esr_el1)
DEFINE_SYSREG_RW_FUNCS(esr_el2)
DEFINE_SYSREG_RW_FUNCS(esr_el3)

DEFINE_SYSREG_RW_FUNCS(afsr0_el1)
DEFINE_SYSREG_RW_FUNCS(afsr0_el2)
DEFINE_SYSREG_RW_FUNCS(afsr0_el3)

DEFINE_SYSREG_RW_FUNCS(afsr1_el1)
DEFINE_SYSREG_RW_FUNCS(afsr1_el2)
DEFINE_SYSREG_RW_FUNCS(afsr1_el3)

DEFINE_SYSREG_RW_FUNCS(far_el1)
DEFINE_SYSREG_RW_FUNCS(far_el2)
DEFINE_SYSREG_RW_FUNCS(far_el3)

DEFINE_SYSREG_RW_FUNCS(mair_el1)
DEFINE_SYSREG_RW_FUNCS(mair_el2)
DEFINE_SYSREG_RW_FUNCS(mair_el3)

DEFINE_SYSREG_RW_FUNCS(amair_el1)
DEFINE_SYSREG_RW_FUNCS(amair_el2)
DEFINE_SYSREG_RW_FUNCS(amair_el3)

DEFINE_SYSREG_READ_FUNC(rvbar_el1)
DEFINE_SYSREG_READ_FUNC(rvbar_el2)
DEFINE_SYSREG_READ_FUNC(rvbar_el3)

DEFINE_SYSREG_RW_FUNCS(rmr_el1)
DEFINE_SYSREG_RW_FUNCS(rmr_el2)
DEFINE_SYSREG_RW_FUNCS(rmr_el3)

DEFINE_SYSREG_RW_FUNCS(tcr_el1)
DEFINE_SYSREG_RW_FUNCS(tcr_el2)
DEFINE_SYSREG_RW_FUNCS(tcr_el3)

DEFINE_SYSREG_RW_FUNCS(ttbr0_el1)
DEFINE_SYSREG_RW_FUNCS(ttbr0_el2)
DEFINE_SYSREG_RW_FUNCS(ttbr0_el3)

DEFINE_SYSREG_RW_FUNCS(ttbr1_el1)

DEFINE_SYSREG_RW_FUNCS(vttbr_el2)

DEFINE_SYSREG_RW_FUNCS(cptr_el2)
DEFINE_SYSREG_RW_FUNCS(cptr_el3)

DEFINE_SYSREG_RW_FUNCS(cpacr_el1)
DEFINE_SYSREG_RW_FUNCS(cntfrq_el0)
DEFINE_SYSREG_RW_FUNCS(cnthp_ctl_el2)
DEFINE_SYSREG_RW_FUNCS(cnthp_tval_el2)
DEFINE_SYSREG_RW_FUNCS(cnthp_cval_el2)
DEFINE_SYSREG_RW_FUNCS(cntps_ctl_el1)
DEFINE_SYSREG_RW_FUNCS(cntps_tval_el1)
DEFINE_SYSREG_RW_FUNCS(cntps_cval_el1)
DEFINE_SYSREG_RW_FUNCS(cntp_ctl_el0)
DEFINE_SYSREG_RW_FUNCS(cntp_tval_el0)
DEFINE_SYSREG_RW_FUNCS(cntp_cval_el0)
DEFINE_SYSREG_READ_FUNC(cntpct_el0)
DEFINE_SYSREG_RW_FUNCS(cnthctl_el2)
DEFINE_SYSREG_RW_FUNCS(cntv_ctl_el0)
DEFINE_SYSREG_RW_FUNCS(cntv_cval_el0)
DEFINE_SYSREG_RW_FUNCS(cntkctl_el1)

DEFINE_SYSREG_RW_FUNCS(vtcr_el2)

#define get_cntp_ctl_enable(x)	(((x) >> CNTP_CTL_ENABLE_SHIFT) & \
				 CNTP_CTL_ENABLE_MASK)
#define get_cntp_ctl_imask(x)	(((x) >> CNTP_CTL_IMASK_SHIFT) & \
				 CNTP_CTL_IMASK_MASK)
#define get_cntp_ctl_istatus(x)	(((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
				 CNTP_CTL_ISTATUS_MASK)

#define set_cntp_ctl_enable(x)	((x) |= (U(1) << CNTP_CTL_ENABLE_SHIFT))
#define set_cntp_ctl_imask(x)	((x) |= (U(1) << CNTP_CTL_IMASK_SHIFT))

#define clr_cntp_ctl_enable(x)	((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
#define clr_cntp_ctl_imask(x)	((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))
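
/*
 * Usage sketch (illustrative only): arming the EL1 secure physical timer a
 * number of counter ticks in the future could look roughly like:
 *
 *   uint64_t cval = read_cntpct_el0() + ticks;
 *   u_register_t ctl = 0U;
 *
 *   write_cntps_cval_el1(cval);
 *   set_cntp_ctl_enable(ctl);
 *   write_cntps_ctl_el1(ctl);
 *   isb();
 *
 * "ticks" is a hypothetical value, typically derived from read_cntfrq_el0().
 */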

DEFINE_SYSREG_RW_FUNCS(tpidr_el0)
DEFINE_SYSREG_RW_FUNCS(tpidr_el1)
DEFINE_SYSREG_RW_FUNCS(tpidr_el2)
DEFINE_SYSREG_RW_FUNCS(tpidr_el3)

DEFINE_SYSREG_RW_FUNCS(cntvoff_el2)

DEFINE_SYSREG_RW_FUNCS(vpidr_el2)
DEFINE_SYSREG_RW_FUNCS(vmpidr_el2)

DEFINE_SYSREG_RW_FUNCS(hacr_el2)
DEFINE_SYSREG_RW_FUNCS(hpfar_el2)

DEFINE_SYSREG_RW_FUNCS(dbgvcr32_el2)
DEFINE_RENAME_SYSREG_RW_FUNCS(ich_hcr_el2, ICH_HCR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(ich_vmcr_el2, ICH_VMCR_EL2)

DEFINE_SYSREG_READ_FUNC(isr_el1)

DEFINE_SYSREG_RW_FUNCS(mdscr_el1)
DEFINE_SYSREG_RW_FUNCS(mdcr_el2)
DEFINE_SYSREG_RW_FUNCS(mdcr_el3)
DEFINE_SYSREG_RW_FUNCS(hstr_el2)
DEFINE_SYSREG_RW_FUNCS(pmcr_el0)

DEFINE_SYSREG_RW_FUNCS(csselr_el1)
DEFINE_SYSREG_RW_FUNCS(tpidrro_el0)
DEFINE_SYSREG_RW_FUNCS(contextidr_el1)
DEFINE_SYSREG_RW_FUNCS(spsr_abt)
DEFINE_SYSREG_RW_FUNCS(spsr_und)
DEFINE_SYSREG_RW_FUNCS(spsr_irq)
DEFINE_SYSREG_RW_FUNCS(spsr_fiq)
DEFINE_SYSREG_RW_FUNCS(dacr32_el2)
DEFINE_SYSREG_RW_FUNCS(ifsr32_el2)

/* GICv3 System Registers */

DEFINE_RENAME_SYSREG_RW_FUNCS(icc_sre_el1, ICC_SRE_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(icc_sre_el2, ICC_SRE_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(icc_sre_el3, ICC_SRE_EL3)
DEFINE_RENAME_SYSREG_RW_FUNCS(icc_pmr_el1, ICC_PMR_EL1)
DEFINE_RENAME_SYSREG_READ_FUNC(icc_rpr_el1, ICC_RPR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(icc_igrpen1_el3, ICC_IGRPEN1_EL3)
DEFINE_RENAME_SYSREG_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0_EL1)
DEFINE_RENAME_SYSREG_READ_FUNC(icc_hppir0_el1, ICC_HPPIR0_EL1)
DEFINE_RENAME_SYSREG_READ_FUNC(icc_hppir1_el1, ICC_HPPIR1_EL1)
DEFINE_RENAME_SYSREG_READ_FUNC(icc_iar0_el1, ICC_IAR0_EL1)
DEFINE_RENAME_SYSREG_READ_FUNC(icc_iar1_el1, ICC_IAR1_EL1)
DEFINE_RENAME_SYSREG_WRITE_FUNC(icc_eoir0_el1, ICC_EOIR0_EL1)
DEFINE_RENAME_SYSREG_WRITE_FUNC(icc_eoir1_el1, ICC_EOIR1_EL1)
DEFINE_RENAME_SYSREG_WRITE_FUNC(icc_sgi0r_el1, ICC_SGI0R_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(icc_sgi1r, ICC_SGI1R)
DEFINE_RENAME_SYSREG_RW_FUNCS(icc_asgi1r, ICC_ASGI1R)

DEFINE_RENAME_SYSREG_READ_FUNC(amcfgr_el0, AMCFGR_EL0)
DEFINE_RENAME_SYSREG_READ_FUNC(amcgcr_el0, AMCGCR_EL0)
DEFINE_RENAME_SYSREG_READ_FUNC(amcg1idr_el0, AMCG1IDR_EL0)
DEFINE_RENAME_SYSREG_RW_FUNCS(amcr_el0, AMCR_EL0)
DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenclr0_el0, AMCNTENCLR0_EL0)
DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenset0_el0, AMCNTENSET0_EL0)
DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenclr1_el0, AMCNTENCLR1_EL0)
DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenset1_el0, AMCNTENSET1_EL0)

DEFINE_RENAME_SYSREG_RW_FUNCS(pmblimitr_el1, PMBLIMITR_EL1)

DEFINE_RENAME_SYSREG_WRITE_FUNC(zcr_el3, ZCR_EL3)
DEFINE_RENAME_SYSREG_WRITE_FUNC(zcr_el2, ZCR_EL2)

DEFINE_RENAME_IDREG_READ_FUNC(id_aa64smfr0_el1, ID_AA64SMFR0_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(smcr_el3, SMCR_EL3)

DEFINE_RENAME_SYSREG_READ_FUNC(erridr_el1, ERRIDR_EL1)
DEFINE_RENAME_SYSREG_WRITE_FUNC(errselr_el1, ERRSELR_EL1)

DEFINE_RENAME_SYSREG_READ_FUNC(erxfr_el1, ERXFR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(erxctlr_el1, ERXCTLR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(erxstatus_el1, ERXSTATUS_EL1)
DEFINE_RENAME_SYSREG_READ_FUNC(erxaddr_el1, ERXADDR_EL1)
DEFINE_RENAME_SYSREG_READ_FUNC(erxmisc0_el1, ERXMISC0_EL1)
DEFINE_RENAME_SYSREG_READ_FUNC(erxmisc1_el1, ERXMISC1_EL1)

DEFINE_RENAME_SYSREG_RW_FUNCS(scxtnum_el2, SCXTNUM_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(scxtnum_el1, SCXTNUM_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(scxtnum_el0, SCXTNUM_EL0)

/* Armv8.1 VHE Registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(contextidr_el2, CONTEXTIDR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(ttbr1_el2, TTBR1_EL2)

/* Armv8.2 ID Registers */
DEFINE_RENAME_IDREG_READ_FUNC(id_aa64mmfr2_el1, ID_AA64MMFR2_EL1)

/* Armv8.2 RAS Registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(disr_el1, DISR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(vdisr_el2, VDISR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(vsesr_el2, VSESR_EL2)

/* Armv8.2 MPAM Registers */
DEFINE_RENAME_SYSREG_READ_FUNC(mpamidr_el1, MPAMIDR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpam3_el3, MPAM3_EL3)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpam2_el2, MPAM2_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpamhcr_el2, MPAMHCR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpamvpm0_el2, MPAMVPM0_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpamvpm1_el2, MPAMVPM1_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpamvpm2_el2, MPAMVPM2_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpamvpm3_el2, MPAMVPM3_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpamvpm4_el2, MPAMVPM4_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpamvpm5_el2, MPAMVPM5_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpamvpm6_el2, MPAMVPM6_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpamvpm7_el2, MPAMVPM7_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(mpamvpmv_el2, MPAMVPMV_EL2)

/* Armv8.3 Pointer Authentication Registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(apiakeyhi_el1, APIAKeyHi_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(apiakeylo_el1, APIAKeyLo_EL1)

/* Armv8.4 Data Independent Timing Register */
DEFINE_RENAME_SYSREG_RW_FUNCS(dit, DIT)

/* Armv8.4 FEAT_TRF Registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(trfcr_el2, TRFCR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(trfcr_el1, TRFCR_EL1)

/* Armv8.4 FEAT_NV2 Register */
DEFINE_RENAME_SYSREG_RW_FUNCS(vncr_el2, VNCR_EL2)

/* Armv8.5 MTE Registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(tfsre0_el1, TFSRE0_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(tfsr_el1, TFSR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(rgsr_el1, RGSR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(gcr_el1, GCR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(tfsr_el2, TFSR_EL2)

/* Armv8.5 FEAT_RNG Registers */
DEFINE_RENAME_SYSREG_READ_FUNC(rndr, RNDR)
DEFINE_RENAME_SYSREG_READ_FUNC(rndrrs, RNDRRS)

/* Armv8.6 FEAT_FGT Registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(hdfgrtr_el2, HDFGRTR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(hafgrtr_el2, HAFGRTR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(hdfgwtr_el2, HDFGWTR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(hfgitr_el2, HFGITR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(hfgrtr_el2, HFGRTR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(hfgwtr_el2, HFGWTR_EL2)

/* Armv8.6 FEAT_ECV Register */
DEFINE_RENAME_SYSREG_RW_FUNCS(cntpoff_el2, CNTPOFF_EL2)

/* FEAT_HCX Register */
DEFINE_RENAME_SYSREG_RW_FUNCS(hcrx_el2, HCRX_EL2)

/* Armv8.9 system registers */
DEFINE_RENAME_IDREG_READ_FUNC(id_aa64mmfr3_el1, ID_AA64MMFR3_EL1)

/* FEAT_TCR2 Registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(tcr2_el1, TCR2_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(tcr2_el2, TCR2_EL2)

/* FEAT_SxPIE Registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(pire0_el1, PIRE0_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(pire0_el2, PIRE0_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(pir_el1, PIR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(pir_el2, PIR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(s2pir_el2, S2PIR_EL2)

/* FEAT_SxPOE Registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(por_el1, POR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(por_el2, POR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(s2por_el1, S2POR_EL1)

/* FEAT_GCS Registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(gcscr_el2, GCSCR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(gcspr_el2, GCSPR_EL2)
DEFINE_RENAME_SYSREG_RW_FUNCS(gcscr_el1, GCSCR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(gcscre0_el1, GCSCRE0_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(gcspr_el1, GCSPR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(gcspr_el0, GCSPR_EL0)

/* DynamIQ Control registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(clusterpwrdn_el1, CLUSTERPWRDN_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(clusterpmcr_el1, CLUSTERPMCR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(clusterpmcntenset_el1, CLUSTERPMCNTENSET_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(clusterpmccntr_el1, CLUSTERPMCCNTR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(clusterpmovsset_el1, CLUSTERPMOVSSET_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(clusterpmovsclr_el1, CLUSTERPMOVSCLR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(clusterpmselr_el1, CLUSTERPMSELR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(clusterpmxevcntr_el1, CLUSTERPMXEVCNTR_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(clusterpmxevtyper_el1, CLUSTERPMXEVTYPER_EL1)

/* CPU Power/Performance Management registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(cpuppmcr_el3, CPUPPMCR_EL3)
DEFINE_RENAME_SYSREG_RW_FUNCS(cpumpmmcr_el3, CPUMPMMCR_EL3)

/* Armv9.2 RME Registers */
DEFINE_RENAME_SYSREG_RW_FUNCS(gptbr_el3, GPTBR_EL3)
DEFINE_RENAME_SYSREG_RW_FUNCS(gpccr_el3, GPCCR_EL3)

#define IS_IN_EL(x) \
	(GET_EL(read_CurrentEl()) == MODE_EL##x)

#define IS_IN_EL1() IS_IN_EL(1)
#define IS_IN_EL2() IS_IN_EL(2)
#define IS_IN_EL3() IS_IN_EL(3)

static inline unsigned int get_current_el(void)
{
	return GET_EL(read_CurrentEl());
}
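
/*
 * Usage sketch (illustrative only): the current-EL helpers above are
 * typically used to gate EL-dependent behaviour, e.g.:
 *
 *   if (IS_IN_EL3()) {
 *       write_scr_el3(read_scr_el3() | SCR_NS_BIT);
 *   }
 *
 * SCR_NS_BIT is assumed to come from arch.h; the check reads CurrentEL and
 * compares the extracted EL field against MODE_EL3.
 */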

static inline unsigned int get_current_el_maybe_constant(void)
{
#if defined(IMAGE_AT_EL1)
	return 1;
#elif defined(IMAGE_AT_EL2)
	return 2;	/* no use-case in TF-A */
#elif defined(IMAGE_AT_EL3)
	return 3;
#else
	/*
	 * If we do not know which exception level this is being built for
	 * (e.g. built for library), fall back to run-time detection.
	 */
	return get_current_el();
#endif
}

/*
 * Check if an EL is implemented from AA64PFR0 register fields.
 */
static inline uint64_t el_implemented(unsigned int el)
{
	if (el > 3U) {
		return EL_IMPL_NONE;
	} else {
		unsigned int shift = ID_AA64PFR0_EL1_SHIFT * el;

		return (read_id_aa64pfr0_el1() >> shift) & ID_AA64PFR0_ELX_MASK;
	}
}
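
/*
 * Usage sketch (illustrative only): callers usually gate EL2-specific
 * handling on the result, e.g.:
 *
 *   if (el_implemented(2U) != EL_IMPL_NONE) {
 *       write_hcr_el2(HCR_RW_BIT);
 *   }
 *
 * EL_IMPL_NONE and HCR_RW_BIT are assumed to be provided by arch.h.
 */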

/*
 * TLBI PAALLOS instruction
 * (TLB Invalidate GPT Information by PA, All Entries, Outer Shareable)
 */
static inline void tlbipaallos(void)
{
	__asm__("sys #6, c8, c1, #4");
}

/*
 * TLBI RPALOS instructions
 * (TLB Range Invalidate GPT Information by PA, Last level, Outer Shareable)
 *
 * command SIZE, bits [47:44] field:
 * 0b0000 4KB
 * 0b0001 16KB
 * 0b0010 64KB
 * 0b0011 2MB
 * 0b0100 32MB
 * 0b0101 512MB
 * 0b0110 1GB
 * 0b0111 16GB
 * 0b1000 64GB
 * 0b1001 512GB
 */
#define TLBI_SZ_4K	0UL
#define TLBI_SZ_16K	1UL
#define TLBI_SZ_64K	2UL
#define TLBI_SZ_2M	3UL
#define TLBI_SZ_32M	4UL
#define TLBI_SZ_512M	5UL
#define TLBI_SZ_1G	6UL
#define TLBI_SZ_16G	7UL
#define TLBI_SZ_64G	8UL
#define TLBI_SZ_512G	9UL

#define TLBI_ADDR_SHIFT	U(12)
#define TLBI_SIZE_SHIFT	U(44)

#define TLBIRPALOS(_addr, _size) \
{ \
	u_register_t arg = ((_addr) >> TLBI_ADDR_SHIFT) | \
			   ((_size) << TLBI_SIZE_SHIFT); \
	__asm__("sys #6, c8, c4, #7, %0" : : "r" (arg)); \
}
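
/*
 * Illustrative note (a sketch, not normative): TLBIRPALOS packs its operand
 * with the physical address shifted right by TLBI_ADDR_SHIFT (12) in the
 * low bits and the SIZE encoding in bits [47:44]. A caller would typically
 * do something like:
 *
 *   tlbirpalos_2m(pa);
 *   dsbosh();
 *
 * to invalidate the GPT TLB entries covering the 2MB region at "pa" (a
 * hypothetical, suitably aligned physical address) and then wait for the
 * invalidation to complete in the outer shareable domain.
 */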

/* Note: addr must be aligned to 4KB */
static inline void tlbirpalos_4k(uintptr_t addr)
{
	TLBIRPALOS(addr, TLBI_SZ_4K);
}

/* Note: addr must be aligned to 16KB */
static inline void tlbirpalos_16k(uintptr_t addr)
{
	TLBIRPALOS(addr, TLBI_SZ_16K);
}

/* Note: addr must be aligned to 64KB */
static inline void tlbirpalos_64k(uintptr_t addr)
{
	TLBIRPALOS(addr, TLBI_SZ_64K);
}

/* Note: addr must be aligned to 2MB */
static inline void tlbirpalos_2m(uintptr_t addr)
{
	TLBIRPALOS(addr, TLBI_SZ_2M);
}

/* Note: addr must be aligned to 32MB */
static inline void tlbirpalos_32m(uintptr_t addr)
{
	TLBIRPALOS(addr, TLBI_SZ_32M);
}

/* Note: addr must be aligned to 512MB */
static inline void tlbirpalos_512m(uintptr_t addr)
{
	TLBIRPALOS(addr, TLBI_SZ_512M);
}

/*
 * Invalidate TLBs of GPT entries by Physical address, last level.
 *
 * @pa: the starting address for the range of invalidation
 * @size: size of the range of invalidation
 */
void gpt_tlbi_by_pa_ll(uint64_t pa, size_t size);

/* Previously defined accessor functions with incomplete register names */

#define read_current_el()	read_CurrentEl()

#define dsb()			dsbsy()

#define read_midr()		read_midr_el1()

#define read_mpidr()		read_mpidr_el1()

#define read_scr()		read_scr_el3()
#define write_scr(_v)		write_scr_el3(_v)

#define read_hcr()		read_hcr_el2()
#define write_hcr(_v)		write_hcr_el2(_v)

#define read_cpacr()		read_cpacr_el1()
#define write_cpacr(_v)		write_cpacr_el1(_v)

#define read_clusterpwrdn()	read_clusterpwrdn_el1()
#define write_clusterpwrdn(_v)	write_clusterpwrdn_el1(_v)

#define read_clusterpmcr()	read_clusterpmcr_el1()
#define write_clusterpmcr(_v)	write_clusterpmcr_el1(_v)

#define read_clusterpmcntenset()	read_clusterpmcntenset_el1()
#define write_clusterpmcntenset(_v)	write_clusterpmcntenset_el1(_v)

#define read_clusterpmccntr()	read_clusterpmccntr_el1()
#define write_clusterpmccntr(_v)	write_clusterpmccntr_el1(_v)

#define read_clusterpmovsset()	read_clusterpmovsset_el1()
#define write_clusterpmovsset(_v)	write_clusterpmovsset_el1(_v)

#define read_clusterpmovsclr()	read_clusterpmovsclr_el1()
#define write_clusterpmovsclr(_v)	write_clusterpmovsclr_el1(_v)

#define read_clusterpmselr()	read_clusterpmselr_el1()
#define write_clusterpmselr(_v)	write_clusterpmselr_el1(_v)

#define read_clusterpmxevcntr()	read_clusterpmxevcntr_el1()
#define write_clusterpmxevcntr(_v)	write_clusterpmxevcntr_el1(_v)

#define read_clusterpmxevtyper()	read_clusterpmxevtyper_el1()
#define write_clusterpmxevtyper(_v)	write_clusterpmxevtyper_el1(_v)

#if ERRATA_SPECULATIVE_AT
/*
 * Assuming the MMU is enabled at EL1 (SCTLR_EL1.M is set):
 * 1. Enable stage 1 page table walks by clearing the TCR_EL1.EPDx bits
 * 2. Execute the AT instruction for the lower EL1/0 translation regime
 * 3. Disable page table walks again by setting the TCR_EL1.EPDx bits
 */
#define AT(_at_inst, _va) \
{ \
	assert((read_sctlr_el1() & SCTLR_M_BIT) != 0ULL); \
	write_tcr_el1(read_tcr_el1() & ~(TCR_EPD0_BIT | TCR_EPD1_BIT)); \
	isb(); \
	_at_inst(_va); \
	write_tcr_el1(read_tcr_el1() | (TCR_EPD0_BIT | TCR_EPD1_BIT)); \
	isb(); \
}
#else
#define AT(_at_inst, _va) _at_inst(_va)
#endif
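
/*
 * Usage sketch (illustrative only): translation code issues AT instructions
 * through this wrapper so the ERRATA_SPECULATIVE_AT sequence is applied
 * transparently when needed, e.g.:
 *
 *   AT(ats1e1r, va);
 *   isb();
 *   u_register_t par = read_par_el1();
 *
 * where "va" is a hypothetical virtual address to translate.
 */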

#endif /* ARCH_HELPERS_H */