/*
 * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef SMCCC_MACROS_S
#define SMCCC_MACROS_S

#include <arch.h>

/*
 * Macro to save the general purpose registers (r0 - r12), the banked
 * spsr, lr, sp registers and the `scr` register to the SMC context on entry
 * due to an SMC call. The `lr` of the current mode (monitor) is expected to
 * be saved already. The `sp` must point to the `smc_ctx_t` to save to.
 * Additionally, save the `pmcr` register as this is updated whilst
 * executing in the secure world.
 */
	.macro smccc_save_gp_mode_regs
	/* Save r0 - r12 in the SMC context */
	stm	sp, {r0-r12}
	mov	r0, sp
	add	r0, r0, #SMC_CTX_SP_USR
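	/* r0 now points to the banked register area of the smc_ctx_t */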

#if ARM_ARCH_MAJOR == 7 && !defined(ARMV7_SUPPORTS_VIRTUALIZATION)
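	/*
	 * Without the Virtualization Extensions the banked-register forms
	 * of MRS/MSR are not available, so switch into each mode with CPS
	 * and store its sp, lr and spsr directly.
	 */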
	/* Must be in secure state to restore Monitor mode */
	ldcopr	r4, SCR
	bic	r2, r4, #SCR_NS_BIT
	stcopr	r2, SCR
	isb

	cps	#MODE32_sys
	stm	r0!, {sp, lr}

	cps	#MODE32_irq
	mrs	r2, spsr
	stm	r0!, {r2, sp, lr}

	cps	#MODE32_fiq
	mrs	r2, spsr
	stm	r0!, {r2, sp, lr}

	cps	#MODE32_svc
	mrs	r2, spsr
	stm	r0!, {r2, sp, lr}

	cps	#MODE32_abt
	mrs	r2, spsr
	stm	r0!, {r2, sp, lr}

	cps	#MODE32_und
	mrs	r2, spsr
	stm	r0!, {r2, sp, lr}

	/* lr_mon is already saved by caller */
	cps	#MODE32_mon
	mrs	r2, spsr
	stm	r0!, {r2}

	stcopr	r4, SCR
#else
	/* Save the banked registers including the current SPSR and LR */
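	/*
	 * The banked-register forms of MRS (Virtualization Extensions /
	 * ARMv8) allow reading these without leaving Monitor mode. The
	 * store order below must match the smc_ctx_t layout from
	 * SMC_CTX_SP_USR onwards.
	 */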
	mrs	r4, sp_usr
	mrs	r5, lr_usr
	mrs	r6, spsr_irq
	mrs	r7, sp_irq
	mrs	r8, lr_irq
	mrs	r9, spsr_fiq
	mrs	r10, sp_fiq
	mrs	r11, lr_fiq
	mrs	r12, spsr_svc
	stm	r0!, {r4-r12}

	mrs	r4, sp_svc
	mrs	r5, lr_svc
	mrs	r6, spsr_abt
	mrs	r7, sp_abt
	mrs	r8, lr_abt
	mrs	r9, spsr_und
	mrs	r10, sp_und
	mrs	r11, lr_und
	mrs	r12, spsr
	stm	r0!, {r4-r12}
	/* lr_mon is already saved by caller */

	ldcopr	r4, SCR

#if ARM_ARCH_MAJOR > 7
	/*
	 * Check if earlier initialization of SDCR.SCCD to 1
	 * failed, meaning that ARMv8-PMU is not implemented,
	 * cycle counting is not disabled and PMCR should be
	 * saved in Non-secure context.
	 */
	ldcopr	r5, SDCR
	tst	r5, #SDCR_SCCD_BIT
	bne	1f
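	/*
	 * SDCR.SCCD is set: the Secure cycle counter is already disabled,
	 * so the PMCR handling below can be skipped.
	 */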
#endif
	/* Secure Cycle Counter is not disabled */
#endif
	ldcopr	r5, PMCR

	/* Check caller's security state */
	tst	r4, #SCR_NS_BIT
	beq	2f

	/* Save PMCR if called from Non-secure state */
	str	r5, [sp, #SMC_CTX_PMCR]

	/* Disable cycle counter when event counting is prohibited */
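	/*
	 * PMCR.DP stops PMCCNTR from counting while event counting is
	 * prohibited (i.e. while executing in Secure state), so Secure
	 * execution does not perturb the cycle count seen by the caller.
	 */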
2:	orr	r5, r5, #PMCR_DP_BIT
	stcopr	r5, PMCR
	isb
1:	str	r4, [sp, #SMC_CTX_SCR]
	.endm
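
/*
 * Illustrative use (a sketch, not part of this file): an SMC entry
 * handler with sp_mon already pointing at its smc_ctx_t is expected to
 * save lr_mon itself before invoking the macro, e.g.
 *
 *	str	lr, [sp, #SMC_CTX_LR_MON]
 *	smccc_save_gp_mode_regs
 */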

/*
 * Macro to restore the `smc_ctx_t`, which includes the general purpose
 * registers and banked mode registers, and exit from Monitor mode.
 * r0 must point to the `smc_ctx_t` to restore from.
 */
	.macro monitor_exit
	/*
	 * Save the current sp and restore the smc context
	 * pointer to sp which will be used for handling the
	 * next SMC.
	 */
	str	sp, [r0, #SMC_CTX_SP_MON]
	mov	sp, r0

	/*
	 * Restore SCR first so that we access the right banked register
	 * when the other mode registers are restored.
	 */
	ldr	r1, [r0, #SMC_CTX_SCR]
	stcopr	r1, SCR
	isb

	/*
	 * Restore PMCR when returning to Non-secure state
	 */
	tst	r1, #SCR_NS_BIT
	beq	2f

	/*
	 * Back to Non-secure state
	 */
#if ARM_ARCH_MAJOR > 7
	/*
	 * Check if earlier initialization of SDCR.SCCD to 1
	 * failed, meaning that ARMv8-PMU is not implemented and
	 * PMCR should be restored from Non-secure context.
	 */
	ldcopr	r1, SDCR
	tst	r1, #SDCR_SCCD_BIT
	bne	2f
#endif
	/*
	 * Restore the PMCR register.
	 */
	ldr	r1, [r0, #SMC_CTX_PMCR]
	stcopr	r1, PMCR
2:
	/* Restore the banked registers including the current SPSR */
	add	r1, r0, #SMC_CTX_SP_USR

#if ARM_ARCH_MAJOR == 7 && !defined(ARMV7_SUPPORTS_VIRTUALIZATION)
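	/*
	 * As on the save path, banked MRS/MSR are unavailable without the
	 * Virtualization Extensions, so switch modes with CPS to reload
	 * each mode's registers.
	 */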
	/* Must be in secure state to restore Monitor mode */
	ldcopr	r4, SCR
	bic	r2, r4, #SCR_NS_BIT
	stcopr	r2, SCR
	isb

	cps	#MODE32_sys
	ldm	r1!, {sp, lr}

	cps	#MODE32_irq
	ldm	r1!, {r2, sp, lr}
	msr	spsr_fsxc, r2

	cps	#MODE32_fiq
	ldm	r1!, {r2, sp, lr}
	msr	spsr_fsxc, r2

	cps	#MODE32_svc
	ldm	r1!, {r2, sp, lr}
	msr	spsr_fsxc, r2

	cps	#MODE32_abt
	ldm	r1!, {r2, sp, lr}
	msr	spsr_fsxc, r2

	cps	#MODE32_und
	ldm	r1!, {r2, sp, lr}
	msr	spsr_fsxc, r2

	cps	#MODE32_mon
	ldm	r1!, {r2}
	msr	spsr_fsxc, r2

	stcopr	r4, SCR
	isb
#else
	ldm	r1!, {r4-r12}
	msr	sp_usr, r4
	msr	lr_usr, r5
	msr	spsr_irq, r6
	msr	sp_irq, r7
	msr	lr_irq, r8
	msr	spsr_fiq, r9
	msr	sp_fiq, r10
	msr	lr_fiq, r11
	msr	spsr_svc, r12

	ldm	r1!, {r4-r12}
	msr	sp_svc, r4
	msr	lr_svc, r5
	msr	spsr_abt, r6
	msr	sp_abt, r7
	msr	lr_abt, r8
	msr	spsr_und, r9
	msr	sp_und, r10
	msr	lr_und, r11
	/*
	 * Use the `_fsxc` suffix explicitly to instruct the assembler
	 * to update all 32 bits of the SPSR. Otherwise, by default, the
	 * assembler assumes the `_fc` suffix, which only modifies the
	 * f->[31:24] and c->[7:0] bits of the SPSR.
	 */
	msr	spsr_fsxc, r12
#endif

	/* Restore the LR */
	ldr	lr, [r0, #SMC_CTX_LR_MON]

	/* Restore the rest of the general purpose registers */
	ldm	r0, {r0-r12}
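	/*
	 * exception_return, defined elsewhere in the common assembly
	 * macros, performs the return to the caller using the lr and spsr
	 * of Monitor mode restored above.
	 */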
	exception_return
	.endm
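
/*
 * Illustrative use (a sketch, not part of this file): once an SMC has
 * been handled, the monitor loads r0 with the smc_ctx_t to return with
 * and invokes the macro, e.g.
 *
 *	mov	r0, r4		@ r4: context of the world being re-entered
 *	monitor_exit
 */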

#endif /* SMCCC_MACROS_S */