1 /*
2  * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #ifndef CONTEXT_H
8 #define CONTEXT_H
9 
10 #include <lib/el3_runtime/context_el2.h>
11 #include <lib/el3_runtime/cpu_data.h>
12 #include <lib/utils_def.h>
13 
14 /*******************************************************************************
 * Constants that allow assembler code to access members of the 'gp_regs'
 * structure at their correct offsets.
17  ******************************************************************************/
18 #define CTX_GPREGS_OFFSET	U(0x0)
19 #define CTX_GPREG_X0		U(0x0)
20 #define CTX_GPREG_X1		U(0x8)
21 #define CTX_GPREG_X2		U(0x10)
22 #define CTX_GPREG_X3		U(0x18)
23 #define CTX_GPREG_X4		U(0x20)
24 #define CTX_GPREG_X5		U(0x28)
25 #define CTX_GPREG_X6		U(0x30)
26 #define CTX_GPREG_X7		U(0x38)
27 #define CTX_GPREG_X8		U(0x40)
28 #define CTX_GPREG_X9		U(0x48)
29 #define CTX_GPREG_X10		U(0x50)
30 #define CTX_GPREG_X11		U(0x58)
31 #define CTX_GPREG_X12		U(0x60)
32 #define CTX_GPREG_X13		U(0x68)
33 #define CTX_GPREG_X14		U(0x70)
34 #define CTX_GPREG_X15		U(0x78)
35 #define CTX_GPREG_X16		U(0x80)
36 #define CTX_GPREG_X17		U(0x88)
37 #define CTX_GPREG_X18		U(0x90)
38 #define CTX_GPREG_X19		U(0x98)
39 #define CTX_GPREG_X20		U(0xa0)
40 #define CTX_GPREG_X21		U(0xa8)
41 #define CTX_GPREG_X22		U(0xb0)
42 #define CTX_GPREG_X23		U(0xb8)
43 #define CTX_GPREG_X24		U(0xc0)
44 #define CTX_GPREG_X25		U(0xc8)
45 #define CTX_GPREG_X26		U(0xd0)
46 #define CTX_GPREG_X27		U(0xd8)
47 #define CTX_GPREG_X28		U(0xe0)
48 #define CTX_GPREG_X29		U(0xe8)
49 #define CTX_GPREG_LR		U(0xf0)
50 #define CTX_GPREG_SP_EL0	U(0xf8)
51 #define CTX_GPREGS_END		U(0x100)
52 
53 /*******************************************************************************
 * Constants that allow assembler code to access members of the 'el3_state'
 * structure at their correct offsets. Note that some of the registers are only
 * 32 bits wide but are stored as 64-bit values for convenience.
57  ******************************************************************************/
58 #define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
59 #define CTX_SCR_EL3		U(0x0)
60 #define CTX_ESR_EL3		U(0x8)
61 #define CTX_RUNTIME_SP		U(0x10)
62 #define CTX_SPSR_EL3		U(0x18)
63 #define CTX_ELR_EL3		U(0x20)
64 #define CTX_PMCR_EL0		U(0x28)
65 #define CTX_IS_IN_EL3		U(0x30)
/* Constants required to support nested exception handling in EL3 */
67 #define CTX_SAVED_ELR_EL3	U(0x38)
68 /*
69  * General purpose flag, to save various EL3 states
70  * FFH mode : Used to identify if handling nested exception
71  * KFH mode : Used as counter value
72  */
73 #define CTX_NESTED_EA_FLAG	U(0x40)
74 #if FFH_SUPPORT
75  #define CTX_SAVED_ESR_EL3	U(0x48)
76  #define CTX_SAVED_SPSR_EL3	U(0x50)
77  #define CTX_SAVED_GPREG_LR	U(0x58)
78  #define CTX_EL3STATE_END	U(0x60) /* Align to the next 16 byte boundary */
79 #else
80  #define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */
81 #endif /* FFH_SUPPORT */
82 
83 /*******************************************************************************
 * Constants that allow assembler code to access members of the 'el1_sysregs'
 * structure at their correct offsets. Note that some of the registers are only
 * 32 bits wide but are stored as 64-bit values for convenience.
88  ******************************************************************************/
89 #define CTX_EL1_SYSREGS_OFFSET	(CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
90 #define CTX_SPSR_EL1		U(0x0)
91 #define CTX_ELR_EL1		U(0x8)
92 #define CTX_SCTLR_EL1		U(0x10)
93 #define CTX_TCR_EL1		U(0x18)
94 #define CTX_CPACR_EL1		U(0x20)
95 #define CTX_CSSELR_EL1		U(0x28)
96 #define CTX_SP_EL1		U(0x30)
97 #define CTX_ESR_EL1		U(0x38)
98 #define CTX_TTBR0_EL1		U(0x40)
99 #define CTX_TTBR1_EL1		U(0x48)
100 #define CTX_MAIR_EL1		U(0x50)
101 #define CTX_AMAIR_EL1		U(0x58)
102 #define CTX_ACTLR_EL1		U(0x60)
103 #define CTX_TPIDR_EL1		U(0x68)
104 #define CTX_TPIDR_EL0		U(0x70)
105 #define CTX_TPIDRRO_EL0		U(0x78)
106 #define CTX_PAR_EL1		U(0x80)
107 #define CTX_FAR_EL1		U(0x88)
108 #define CTX_AFSR0_EL1		U(0x90)
109 #define CTX_AFSR1_EL1		U(0x98)
110 #define CTX_CONTEXTIDR_EL1	U(0xa0)
111 #define CTX_VBAR_EL1		U(0xa8)
112 #define CTX_MDCCINT_EL1		U(0xb0)
113 #define CTX_MDSCR_EL1		U(0xb8)
114 
115 #define CTX_AARCH64_END		U(0xc0) /* Align to the next 16 byte boundary */
116 
117 /*
118  * If the platform is AArch64-only, there is no need to save and restore these
119  * AArch32 registers.
120  */
121 #if CTX_INCLUDE_AARCH32_REGS
122 #define CTX_SPSR_ABT		(CTX_AARCH64_END + U(0x0))
123 #define CTX_SPSR_UND		(CTX_AARCH64_END + U(0x8))
124 #define CTX_SPSR_IRQ		(CTX_AARCH64_END + U(0x10))
125 #define CTX_SPSR_FIQ		(CTX_AARCH64_END + U(0x18))
126 #define CTX_DACR32_EL2		(CTX_AARCH64_END + U(0x20))
127 #define CTX_IFSR32_EL2		(CTX_AARCH64_END + U(0x28))
128 #define CTX_AARCH32_END		(CTX_AARCH64_END + U(0x30)) /* Align to the next 16 byte boundary */
129 #else
130 #define CTX_AARCH32_END		CTX_AARCH64_END
131 #endif /* CTX_INCLUDE_AARCH32_REGS */
132 
133 /*
134  * If the timer registers aren't saved and restored, we don't have to reserve
135  * space for them in the context
136  */
137 #if NS_TIMER_SWITCH
138 #define CTX_CNTP_CTL_EL0	(CTX_AARCH32_END + U(0x0))
139 #define CTX_CNTP_CVAL_EL0	(CTX_AARCH32_END + U(0x8))
140 #define CTX_CNTV_CTL_EL0	(CTX_AARCH32_END + U(0x10))
141 #define CTX_CNTV_CVAL_EL0	(CTX_AARCH32_END + U(0x18))
142 #define CTX_CNTKCTL_EL1		(CTX_AARCH32_END + U(0x20))
143 #define CTX_TIMER_SYSREGS_END	(CTX_AARCH32_END + U(0x30)) /* Align to the next 16 byte boundary */
144 #else
145 #define CTX_TIMER_SYSREGS_END	CTX_AARCH32_END
146 #endif /* NS_TIMER_SWITCH */
147 
148 #if ENABLE_FEAT_MTE2
149 #define CTX_TFSRE0_EL1		(CTX_TIMER_SYSREGS_END + U(0x0))
150 #define CTX_TFSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x8))
151 #define CTX_RGSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x10))
152 #define CTX_GCR_EL1		(CTX_TIMER_SYSREGS_END + U(0x18))
153 #define CTX_MTE_REGS_END	(CTX_TIMER_SYSREGS_END + U(0x20)) /* Align to the next 16 byte boundary */
154 #else
155 #define CTX_MTE_REGS_END	CTX_TIMER_SYSREGS_END
156 #endif /* ENABLE_FEAT_MTE2 */
157 
158 #if ENABLE_FEAT_RAS
159 #define CTX_DISR_EL1		(CTX_MTE_REGS_END + U(0x0))
160 #define CTX_RAS_REGS_END	(CTX_MTE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
161 #else
#define CTX_RAS_REGS_END	CTX_MTE_REGS_END
163 #endif /* ENABLE_FEAT_RAS */
164 
165 #if ENABLE_FEAT_S1PIE
166 #define CTX_PIRE0_EL1		(CTX_RAS_REGS_END + U(0x0))
167 #define CTX_PIR_EL1		(CTX_RAS_REGS_END + U(0x8))
168 #define CTX_S1PIE_REGS_END	(CTX_RAS_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
169 #else
170 #define CTX_S1PIE_REGS_END	CTX_RAS_REGS_END
171 #endif /* ENABLE_FEAT_S1PIE */
172 
173 #if ENABLE_FEAT_S1POE
174 #define CTX_POR_EL1		(CTX_S1PIE_REGS_END + U(0x0))
175 #define CTX_S1POE_REGS_END	(CTX_S1PIE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
176 #else
177 #define CTX_S1POE_REGS_END	CTX_S1PIE_REGS_END
178 #endif /* ENABLE_FEAT_S1POE */
179 
180 #if ENABLE_FEAT_S2POE
181 #define CTX_S2POR_EL1		(CTX_S1POE_REGS_END + U(0x0))
182 #define CTX_S2POE_REGS_END	(CTX_S1POE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
183 #else
184 #define CTX_S2POE_REGS_END	CTX_S1POE_REGS_END
185 #endif /* ENABLE_FEAT_S2POE */
186 
187 #if ENABLE_FEAT_TCR2
188 #define CTX_TCR2_EL1		(CTX_S2POE_REGS_END + U(0x0))
189 #define CTX_TCR2_REGS_END	(CTX_S2POE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
190 #else
#define CTX_TCR2_REGS_END	CTX_S2POE_REGS_END
192 #endif /* ENABLE_FEAT_TCR2 */
193 
194 #if ENABLE_TRF_FOR_NS
195 #define CTX_TRFCR_EL1		(CTX_TCR2_REGS_END + U(0x0))
196 #define CTX_TRF_REGS_END	(CTX_TCR2_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
197 #else
198 #define CTX_TRF_REGS_END	CTX_TCR2_REGS_END
199 #endif /* ENABLE_TRF_FOR_NS */
200 
201 #if ENABLE_FEAT_CSV2_2
202 #define CTX_SCXTNUM_EL0		(CTX_TRF_REGS_END + U(0x0))
203 #define CTX_SCXTNUM_EL1		(CTX_TRF_REGS_END + U(0x8))
204 #define CTX_CSV2_2_REGS_END	(CTX_TRF_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
205 #else
206 #define CTX_CSV2_2_REGS_END	CTX_TRF_REGS_END
207 #endif /* ENABLE_FEAT_CSV2_2 */
208 
209 #if ENABLE_FEAT_GCS
210 #define CTX_GCSCR_EL1		(CTX_CSV2_2_REGS_END + U(0x0))
211 #define CTX_GCSCRE0_EL1		(CTX_CSV2_2_REGS_END + U(0x8))
212 #define CTX_GCSPR_EL1		(CTX_CSV2_2_REGS_END + U(0x10))
213 #define CTX_GCSPR_EL0		(CTX_CSV2_2_REGS_END + U(0x18))
214 #define CTX_GCS_REGS_END	(CTX_CSV2_2_REGS_END + U(0x20)) /* Align to the next 16 byte boundary */
215 #else
216 #define CTX_GCS_REGS_END	CTX_CSV2_2_REGS_END
217 #endif /* ENABLE_FEAT_GCS */
218 
219 /*
220  * End of EL1 system registers.
221  */
222 #define CTX_EL1_SYSREGS_END	CTX_GCS_REGS_END
223 
224 /*******************************************************************************
 * Constants that allow assembler code to access members of the 'fp_regs'
 * structure at their correct offsets.
227  ******************************************************************************/
#define CTX_FPREGS_OFFSET	(CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)
229 #if CTX_INCLUDE_FPREGS
230 #define CTX_FP_Q0		U(0x0)
231 #define CTX_FP_Q1		U(0x10)
232 #define CTX_FP_Q2		U(0x20)
233 #define CTX_FP_Q3		U(0x30)
234 #define CTX_FP_Q4		U(0x40)
235 #define CTX_FP_Q5		U(0x50)
236 #define CTX_FP_Q6		U(0x60)
237 #define CTX_FP_Q7		U(0x70)
238 #define CTX_FP_Q8		U(0x80)
239 #define CTX_FP_Q9		U(0x90)
240 #define CTX_FP_Q10		U(0xa0)
241 #define CTX_FP_Q11		U(0xb0)
242 #define CTX_FP_Q12		U(0xc0)
243 #define CTX_FP_Q13		U(0xd0)
244 #define CTX_FP_Q14		U(0xe0)
245 #define CTX_FP_Q15		U(0xf0)
246 #define CTX_FP_Q16		U(0x100)
247 #define CTX_FP_Q17		U(0x110)
248 #define CTX_FP_Q18		U(0x120)
249 #define CTX_FP_Q19		U(0x130)
250 #define CTX_FP_Q20		U(0x140)
251 #define CTX_FP_Q21		U(0x150)
252 #define CTX_FP_Q22		U(0x160)
253 #define CTX_FP_Q23		U(0x170)
254 #define CTX_FP_Q24		U(0x180)
255 #define CTX_FP_Q25		U(0x190)
256 #define CTX_FP_Q26		U(0x1a0)
257 #define CTX_FP_Q27		U(0x1b0)
258 #define CTX_FP_Q28		U(0x1c0)
259 #define CTX_FP_Q29		U(0x1d0)
260 #define CTX_FP_Q30		U(0x1e0)
261 #define CTX_FP_Q31		U(0x1f0)
262 #define CTX_FP_FPSR		U(0x200)
263 #define CTX_FP_FPCR		U(0x208)
264 #if CTX_INCLUDE_AARCH32_REGS
265 #define CTX_FP_FPEXC32_EL2	U(0x210)
266 #define CTX_FPREGS_END		U(0x220) /* Align to the next 16 byte boundary */
267 #else
268 #define CTX_FPREGS_END		U(0x210) /* Align to the next 16 byte boundary */
269 #endif /* CTX_INCLUDE_AARCH32_REGS */
270 #else
271 #define CTX_FPREGS_END		U(0)
272 #endif /* CTX_INCLUDE_FPREGS */
273 
274 /*******************************************************************************
275  * Registers related to CVE-2018-3639
276  ******************************************************************************/
277 #define CTX_CVE_2018_3639_OFFSET	(CTX_FPREGS_OFFSET + CTX_FPREGS_END)
278 #define CTX_CVE_2018_3639_DISABLE	U(0)
279 #define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */
280 
281 /*******************************************************************************
282  * Registers related to ARMv8.3-PAuth.
283  ******************************************************************************/
284 #define CTX_PAUTH_REGS_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
285 #if CTX_INCLUDE_PAUTH_REGS
286 #define CTX_PACIAKEY_LO		U(0x0)
287 #define CTX_PACIAKEY_HI		U(0x8)
288 #define CTX_PACIBKEY_LO		U(0x10)
289 #define CTX_PACIBKEY_HI		U(0x18)
290 #define CTX_PACDAKEY_LO		U(0x20)
291 #define CTX_PACDAKEY_HI		U(0x28)
292 #define CTX_PACDBKEY_LO		U(0x30)
293 #define CTX_PACDBKEY_HI		U(0x38)
294 #define CTX_PACGAKEY_LO		U(0x40)
295 #define CTX_PACGAKEY_HI		U(0x48)
296 #define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
297 #else
298 #define CTX_PAUTH_REGS_END	U(0)
299 #endif /* CTX_INCLUDE_PAUTH_REGS */
300 
301 /*******************************************************************************
302  * Registers related to ARMv8.2-MPAM.
303  ******************************************************************************/
304 #define CTX_MPAM_REGS_OFFSET	(CTX_PAUTH_REGS_OFFSET + CTX_PAUTH_REGS_END)
305 #if CTX_INCLUDE_MPAM_REGS
306 #define CTX_MPAM2_EL2		U(0x0)
307 #define CTX_MPAMHCR_EL2		U(0x8)
308 #define CTX_MPAMVPM0_EL2	U(0x10)
309 #define CTX_MPAMVPM1_EL2	U(0x18)
310 #define CTX_MPAMVPM2_EL2	U(0x20)
311 #define CTX_MPAMVPM3_EL2	U(0x28)
312 #define CTX_MPAMVPM4_EL2	U(0x30)
313 #define CTX_MPAMVPM5_EL2	U(0x38)
314 #define CTX_MPAMVPM6_EL2	U(0x40)
315 #define CTX_MPAMVPM7_EL2	U(0x48)
316 #define CTX_MPAMVPMV_EL2	U(0x50)
317 #define CTX_MPAM_REGS_END	U(0x60)
318 #else
319 #define CTX_MPAM_REGS_END	U(0x0)
320 #endif /* CTX_INCLUDE_MPAM_REGS */
321 
322 /*******************************************************************************
323  * Registers initialised in a per-world context.
324  ******************************************************************************/
325 #define CTX_CPTR_EL3			U(0x0)
326 #define CTX_ZCR_EL3			U(0x8)
327 #define CTX_MPAM3_EL3			U(0x10)
328 #define CTX_PERWORLD_EL3STATE_END	U(0x18)
329 
330 #ifndef __ASSEMBLER__
331 
332 #include <stdint.h>
333 
334 #include <lib/cassert.h>
335 
336 /*
337  * Common constants to help define the 'cpu_context' structure and its
338  * members below.
339  */
340 #define DWORD_SHIFT		U(3)
341 #define DEFINE_REG_STRUCT(name, num_regs)	\
342 	typedef struct name {			\
343 		uint64_t ctx_regs[num_regs];	\
344 	}  __aligned(16) name##_t
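
/*
 * For illustration only: a hypothetical invocation such as
 * DEFINE_REG_STRUCT(example_regs, 4) expands to roughly the following,
 * i.e. an array of 64-bit registers wrapped in a 16-byte aligned type:
 *
 *	typedef struct example_regs {
 *		uint64_t ctx_regs[4];
 *	} __aligned(16) example_regs_t;
 */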
345 
346 /* Constants to determine the size of individual context structures */
347 #define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)
348 #define CTX_EL1_SYSREGS_ALL	(CTX_EL1_SYSREGS_END >> DWORD_SHIFT)
349 
350 #if CTX_INCLUDE_FPREGS
351 # define CTX_FPREG_ALL		(CTX_FPREGS_END >> DWORD_SHIFT)
352 #endif
353 #define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
354 #define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)
355 #if CTX_INCLUDE_PAUTH_REGS
356 # define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
357 #endif
358 #if CTX_INCLUDE_MPAM_REGS
359 # define CTX_MPAM_REGS_ALL	(CTX_MPAM_REGS_END >> DWORD_SHIFT)
360 #endif
361 
362 /*
363  * AArch64 general purpose register context structure. Usually x0-x18,
364  * lr are saved as the compiler is expected to preserve the remaining
365  * callee saved registers if used by the C runtime and the assembler
366  * does not touch the remaining. But in case of world switch during
367  * exception handling, we need to save the callee registers too.
368  */
369 DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);
370 
371 /*
372  * AArch64 EL1 system register context structure for preserving the
373  * architectural state during world switches.
374  */
375 DEFINE_REG_STRUCT(el1_sysregs, CTX_EL1_SYSREGS_ALL);
376 
377 /*
378  * AArch64 floating point register context structure for preserving
379  * the floating point state during switches from one security state to
380  * another.
381  */
382 #if CTX_INCLUDE_FPREGS
383 DEFINE_REG_STRUCT(fp_regs, CTX_FPREG_ALL);
384 #endif
385 
386 /*
387  * Miscellaneous registers used by EL3 firmware to maintain its state
388  * across exception entries and exits
389  */
390 DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);
391 
392 /* Function pointer used by CVE-2018-3639 dynamic mitigation */
393 DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);
394 
/* Registers associated with ARMv8.3-PAuth */
396 #if CTX_INCLUDE_PAUTH_REGS
397 DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
398 #endif
399 
/* Registers associated with ARMv8.2-MPAM */
401 #if CTX_INCLUDE_MPAM_REGS
402 DEFINE_REG_STRUCT(mpam, CTX_MPAM_REGS_ALL);
403 #endif
404 
405 /*
406  * Macros to access members of any of the above structures using their
407  * offsets
408  */
409 #define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
410 #define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
411 					 = (uint64_t) (val))
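
/*
 * Usage sketch (illustrative only): given a pointer 'regs' to one of the
 * register structures defined above, e.g. a gp_regs_t, the saved value of
 * x1 can be read and updated as follows:
 *
 *	uint64_t x1 = read_ctx_reg(regs, CTX_GPREG_X1);
 *	write_ctx_reg(regs, CTX_GPREG_X1, x1 + 1U);
 */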
412 
413 /*
414  * Top-level context structure which is used by EL3 firmware to preserve
415  * the state of a core at the next lower EL in a given security state and
416  * save enough EL3 meta data to be able to return to that EL and security
417  * state. The context management library will be used to ensure that
418  * SP_EL3 always points to an instance of this structure at exception
419  * entry and exit.
420  */
421 typedef struct cpu_context {
422 	gp_regs_t gpregs_ctx;
423 	el3_state_t el3state_ctx;
424 	el1_sysregs_t el1_sysregs_ctx;
425 
426 #if CTX_INCLUDE_FPREGS
427 	fp_regs_t fpregs_ctx;
428 #endif
429 	cve_2018_3639_t cve_2018_3639_ctx;
430 
431 #if CTX_INCLUDE_PAUTH_REGS
432 	pauth_t pauth_ctx;
433 #endif
434 
435 #if CTX_INCLUDE_MPAM_REGS
436 	mpam_t	mpam_ctx;
437 #endif
438 
439 #if CTX_INCLUDE_EL2_REGS
440 	el2_sysregs_t el2_sysregs_ctx;
441 #endif
442 
443 } cpu_context_t;
444 
445 /*
446  * Per-World Context.
447  * It stores registers whose values can be shared across CPUs.
448  */
449 typedef struct per_world_context {
450 	uint64_t ctx_cptr_el3;
451 	uint64_t ctx_zcr_el3;
452 	uint64_t ctx_mpam3_el3;
453 } per_world_context_t;
454 
455 extern per_world_context_t per_world_context[CPU_DATA_CONTEXT_NUM];
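
/*
 * Usage sketch (illustrative only): 'per_world_context' holds one entry per
 * world, as counted by CPU_DATA_CONTEXT_NUM. Assuming 'world_idx' is a valid
 * index supplied by the caller, the CPTR_EL3 value programmed for that world
 * can be read as:
 *
 *	uint64_t cptr = per_world_context[world_idx].ctx_cptr_el3;
 */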
456 
457 /* Macros to access members of the 'cpu_context_t' structure */
458 #define get_el3state_ctx(h)	(&((cpu_context_t *) h)->el3state_ctx)
459 #if CTX_INCLUDE_FPREGS
460 # define get_fpregs_ctx(h)	(&((cpu_context_t *) h)->fpregs_ctx)
461 #endif
462 #define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) h)->el1_sysregs_ctx)
463 #if CTX_INCLUDE_EL2_REGS
464 # define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) h)->el2_sysregs_ctx)
465 #endif
466 #define get_gpregs_ctx(h)	(&((cpu_context_t *) h)->gpregs_ctx)
467 #define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) h)->cve_2018_3639_ctx)
468 #if CTX_INCLUDE_PAUTH_REGS
469 # define get_pauth_ctx(h)	(&((cpu_context_t *) h)->pauth_ctx)
470 #endif
471 #if CTX_INCLUDE_MPAM_REGS
472 # define get_mpam_ctx(h)	(&((cpu_context_t *) h)->mpam_ctx)
473 #endif
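
/*
 * Usage sketch (illustrative only): the accessor macros above combine with
 * read_ctx_reg()/write_ctx_reg() to reach individual saved registers. Here
 * 'ctx' is assumed to be a valid cpu_context_t pointer and 'spsr' a
 * caller-provided value, neither of which is part of this header:
 *
 *	gp_regs_t *gpregs = get_gpregs_ctx(ctx);
 *	uint64_t x0 = read_ctx_reg(gpregs, CTX_GPREG_X0);
 *	write_ctx_reg(get_el3state_ctx(ctx), CTX_SPSR_EL3, spsr);
 */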
474 
475 /*
476  * Compile time assertions related to the 'cpu_context' structure to
477  * ensure that the assembler and the compiler view of the offsets of
478  * the structure members is the same.
479  */
480 CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx),
481 	assert_core_context_gp_offset_mismatch);
482 
483 CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx),
484 	assert_core_context_el3state_offset_mismatch);
485 
486 CASSERT(CTX_EL1_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el1_sysregs_ctx),
487 	assert_core_context_el1_sys_offset_mismatch);
488 
489 #if CTX_INCLUDE_FPREGS
490 CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx),
491 	assert_core_context_fp_offset_mismatch);
492 #endif /* CTX_INCLUDE_FPREGS */
493 
494 CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx),
495 	assert_core_context_cve_2018_3639_offset_mismatch);
496 
497 #if CTX_INCLUDE_PAUTH_REGS
498 CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx),
499 	assert_core_context_pauth_offset_mismatch);
500 #endif /* CTX_INCLUDE_PAUTH_REGS */
501 
502 #if CTX_INCLUDE_MPAM_REGS
503 CASSERT(CTX_MPAM_REGS_OFFSET == __builtin_offsetof(cpu_context_t, mpam_ctx),
504 	assert_core_context_mpam_offset_mismatch);
505 #endif /* CTX_INCLUDE_MPAM_REGS */
506 
507 /*
508  * Helper macro to set the general purpose registers that correspond to
509  * parameters in an aapcs_64 call i.e. x0-x7
510  */
511 #define set_aapcs_args0(ctx, x0)				do {	\
512 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
513 	} while (0)
514 #define set_aapcs_args1(ctx, x0, x1)				do {	\
515 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
516 		set_aapcs_args0(ctx, x0);				\
517 	} while (0)
518 #define set_aapcs_args2(ctx, x0, x1, x2)			do {	\
519 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
520 		set_aapcs_args1(ctx, x0, x1);				\
521 	} while (0)
522 #define set_aapcs_args3(ctx, x0, x1, x2, x3)			do {	\
523 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
524 		set_aapcs_args2(ctx, x0, x1, x2);			\
525 	} while (0)
526 #define set_aapcs_args4(ctx, x0, x1, x2, x3, x4)		do {	\
527 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
528 		set_aapcs_args3(ctx, x0, x1, x2, x3);			\
529 	} while (0)
530 #define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5)		do {	\
531 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
532 		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);		\
533 	} while (0)
534 #define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6)	do {	\
535 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
536 		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);		\
537 	} while (0)
538 #define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7)	do {	\
539 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
540 		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
541 	} while (0)
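
/*
 * Usage sketch (illustrative only): preparing the first four argument
 * registers of a saved context before returning to a lower EL. 'ctx' is
 * assumed to be a valid cpu_context_t pointer and the remaining values are
 * caller-provided:
 *
 *	set_aapcs_args3(ctx, ret_code, arg1, arg2, arg3);
 *
 * This writes x0-x3 in the context's 'gp_regs' so that the values appear in
 * x0-x3 when the context is restored.
 */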
542 
543 /*******************************************************************************
544  * Function prototypes
545  ******************************************************************************/
546 #if CTX_INCLUDE_FPREGS
547 void fpregs_context_save(fp_regs_t *regs);
548 void fpregs_context_restore(fp_regs_t *regs);
549 #endif
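
/*
 * Usage sketch (illustrative only, applicable when CTX_INCLUDE_FPREGS is
 * enabled): saving the FP/SIMD state of an outgoing context and restoring
 * that of an incoming one, assuming 'outgoing' and 'incoming' are valid
 * cpu_context_t pointers managed by the caller:
 *
 *	fpregs_context_save(get_fpregs_ctx(outgoing));
 *	fpregs_context_restore(get_fpregs_ctx(incoming));
 */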
550 
551 #endif /* __ASSEMBLER__ */
552 
553 #endif /* CONTEXT_H */
554