/*
 * Copyright (c) 2018-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */


#include <assert_macros.S>
#include <asm_macros.S>
#include <bl31/ea_handle.h>
#include <context.h>
#include <lib/extensions/ras_arch.h>
#include <cpu_macros.S>

	.globl	handle_lower_el_ea_esb
	.globl	handle_lower_el_async_ea
	.globl	enter_lower_el_sync_ea
	.globl	enter_lower_el_async_ea


/*
 * Function to delegate External Aborts synchronized by the ESB instruction at
 * EL3 vector entry. This function assumes GP registers x0-x29 have been saved,
 * and are available for use. It delegates the handling of the EA to the
 * platform handler, and returns only upon successfully handling the EA;
 * otherwise it panics. On return from this function, the original exception
 * handler is expected to resume.
 */
func handle_lower_el_ea_esb
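	/*
	 * x0 carries the EA reason; x1 carries the error syndrome that the ESB
	 * instruction recorded in DISR_EL1. Tail-call the common delegation
	 * path below.
	 */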
	mov	x0, #ERROR_EA_ESB
	mrs	x1, DISR_EL1
	b	ea_proceed
endfunc handle_lower_el_ea_esb


/*
 * This function forms the tail end of Synchronous Exception entry from a lower
 * EL, and expects to handle Synchronous External Aborts from lower ELs and CPU
 * Implementation Defined Exceptions. If any other kind of exception is
 * detected, this function reports an unhandled exception.
 *
 * Since it's part of the exception vector, this function doesn't expect any GP
 * registers to have been saved. It delegates the handling of the EA to the
 * platform handler, and upon successfully handling the EA, exits EL3;
 * otherwise it panics.
 */
func enter_lower_el_sync_ea
	/*
	 * Explicitly save x30 so as to free up a register and to enable
	 * branching.
	 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.eq	1f

	/* Save GP registers */
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

	/* Get the cpu_ops pointer */
	bl	get_cpu_ops_ptr

	/* Get the cpu_ops exception handler */
	ldr	x0, [x0, #CPU_E_HANDLER_FUNC]

	/*
	 * If the reserved function pointer is NULL, this CPU does not have an
	 * implementation defined exception handler function
	 */
	cbz	x0, 2f
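	/* Extract the Exception Class into x1 and invoke the CPU-specific handler */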
	mrs	x1, esr_el3
	ubfx	x1, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	blr	x0
	b	2f

1:
	/* Test for EA bit in the instruction syndrome */
	mrs	x30, esr_el3
	tbz	x30, #ESR_ISS_EABORT_EA_BIT, 3f

	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * If Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented, save PMCR_EL0 and disable Cycle Counter.
	 */
	bl	save_gp_pmcr_pauth_regs

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
	bl	delegate_sync_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
2:
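	/* Restore the GP registers that were saved for the cpu_ops lookup */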
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

3:
	/* Synchronous exceptions other than the above are reported as unhandled */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	no_ret	report_unhandled_exception
endfunc enter_lower_el_sync_ea


/*
 * This function handles SErrors from lower ELs.
 *
 * Since it's part of the exception vector, this function doesn't expect any GP
 * registers to have been saved. It delegates the handling of the EA to the
 * platform handler, and upon successfully handling the EA, exits EL3;
 * otherwise it panics.
 */
func enter_lower_el_async_ea
	/*
	 * Explicitly save x30 so as to free up a register and to enable
	 * branching
	 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

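/*
 * handle_lower_el_async_ea is exported above so that callers (such as other
 * exception vector code) which have already saved x30 can branch here directly.
 */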
handle_lower_el_async_ea:
	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * If Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented, save PMCR_EL0 and disable Cycle Counter.
	 */
	bl	save_gp_pmcr_pauth_regs

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_ASYNC
	mrs	x1, esr_el3
	bl	delegate_async_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc enter_lower_el_async_ea


/*
 * Prelude for Synchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_sync_ea
#if RAS_EXTENSION
	/*
	 * Check for Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx    x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH
	cmp     x2, #ERROR_STATUS_SET_UC
	b.ne    1f

	/* Check fault status code */
	ubfx    x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp     x3, #SYNC_EA_FSC
	b.ne    1f

	no_ret  plat_handle_uncontainable_ea
1:
#endif

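	/* Delegate to the common External Abort handling path */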
	b       ea_proceed
endfunc delegate_sync_ea


/*
 * Prelude for Asynchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_async_ea
#if RAS_EXTENSION
	/*
	 * Check for Implementation Defined Syndrome. If so, skip checking
	 * Uncontainable error type from the syndrome as the format is unknown.
	 */
	tbnz	x1, #SERROR_IDS_BIT, 1f

	/*
	 * Check for Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x2, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH
	cmp	x2, #ERROR_STATUS_UET_UC
	b.ne	1f

	/* Check DFSC for SError type */
	ubfx	x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x3, #DFSC_SERROR
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

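	/* Delegate to the common External Abort handling path */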
	b	ea_proceed
endfunc delegate_async_ea


/*
 * Delegate External Abort handling to the platform's EA handler. This function
 * assumes that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func ea_proceed
	/*
	 * If the ESR saved in the context earlier is not zero, we were already
	 * processing an EA, and this is a double fault.
	 */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]
	cbz	x5, 1f
	no_ret	plat_handle_double_fault

1:
	/* Save EL3 state */
	mrs	x2, spsr_el3
	mrs	x3, elr_el3
	stp	x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]

	/*
	 * Save ESR as handling might involve lower ELs, and returning to EL3
	 * from there would trample the original ESR.
	 */
	mrs	x4, scr_el3
	mrs	x5, esr_el3
	stp	x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]

	/*
	 * Set up the rest of the arguments, and call the platform External
	 * Abort handler.
	 *
	 * x0: EA reason (already in place).
	 * x1: Exception syndrome (already in place).
	 * x2: Cookie (unused for now).
	 * x3: Context pointer.
	 * x4: Flags (security state from SCR for now).
	 */
	mov	x2, xzr
	mov	x3, sp
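	/* SCR_EL3.NS (bit 0) gives the security state of the interrupted world */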
	ubfx	x4, x4, #0, #1

	/* Switch to runtime stack */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #MODE_SP_EL0
	mov	sp, x5

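	/* Preserve the return address in x29; the bl below clobbers x30 */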
	mov	x29, x30
#if ENABLE_ASSERTIONS
	/* Stash the stack pointer */
	mov	x28, sp
#endif
	bl	plat_ea_handler

#if ENABLE_ASSERTIONS
	/*
	 * Error handling flows might involve long jumps; so upon returning from
	 * the platform error handler, validate that we've completely unwound
	 * the stack.
	 */
	mov	x27, sp
	cmp	x28, x27
	ASM_ASSERT(eq)
#endif

	/* Make SP point to context */
	msr	spsel, #MODE_SP_ELX

	/* Restore SPSR_EL3 and ELR_EL3 */
	ldp	x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	spsr_el3, x1
	msr	elr_el3, x2

	/* Restore SCR_EL3 and ESR_EL3 */
	ldp	x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	msr	scr_el3, x3
	msr	esr_el3, x4

#if ENABLE_ASSERTIONS
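	/* Sanity check: the ESR of the EA being handled must be non-zero */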
	cmp	x4, xzr
	ASM_ASSERT(ne)
#endif

	/* Clear ESR storage */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]

	ret	x29
endfunc ea_proceed