xref: /aosp_15_r20/external/arm-trusted-firmware/include/arch/aarch32/asm_macros.S (revision 54fd6939e177f8ff529b10183254802c76df6d08)
/*
 * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
6*54fd6939SJiyong Park#ifndef ASM_MACROS_S
7*54fd6939SJiyong Park#define ASM_MACROS_S
8*54fd6939SJiyong Park
9*54fd6939SJiyong Park#include <arch.h>
10*54fd6939SJiyong Park#include <common/asm_macros_common.S>
11*54fd6939SJiyong Park#include <lib/spinlock.h>
12*54fd6939SJiyong Park
/*
 * TLBI helper with a type specifier. Implements the workaround for
 * erratum 813419 of Cortex-A57: the TLBI write is issued twice with an
 * intervening DSB ISH so the invalidation takes effect reliably.
 */
#if ERRATA_A57_813419
#define TLB_INVALIDATE(_reg, _coproc) \
	stcopr	_reg, _coproc; \
	dsb	ish; \
	stcopr	_reg, _coproc
#else
#define TLB_INVALIDATE(_reg, _coproc) \
	stcopr	_reg, _coproc
#endif

#define WORD_SIZE	4
28*54fd6939SJiyong Park
29*54fd6939SJiyong Park	/*
30*54fd6939SJiyong Park	 * Co processor register accessors
31*54fd6939SJiyong Park	 */
32*54fd6939SJiyong Park	.macro ldcopr reg, coproc, opc1, CRn, CRm, opc2
33*54fd6939SJiyong Park	mrc	\coproc, \opc1, \reg, \CRn, \CRm, \opc2
34*54fd6939SJiyong Park	.endm
35*54fd6939SJiyong Park
36*54fd6939SJiyong Park	.macro ldcopr16 reg1, reg2, coproc, opc1, CRm
37*54fd6939SJiyong Park	mrrc	\coproc, \opc1, \reg1, \reg2, \CRm
38*54fd6939SJiyong Park	.endm
39*54fd6939SJiyong Park
40*54fd6939SJiyong Park	.macro stcopr reg, coproc, opc1, CRn, CRm, opc2
41*54fd6939SJiyong Park	mcr	\coproc, \opc1, \reg, \CRn, \CRm, \opc2
42*54fd6939SJiyong Park	.endm
43*54fd6939SJiyong Park
44*54fd6939SJiyong Park	.macro stcopr16 reg1, reg2, coproc, opc1, CRm
45*54fd6939SJiyong Park	mcrr	\coproc, \opc1, \reg1, \reg2, \CRm
46*54fd6939SJiyong Park	.endm
47*54fd6939SJiyong Park
48*54fd6939SJiyong Park	/* Cache line size helpers */
49*54fd6939SJiyong Park	.macro	dcache_line_size  reg, tmp
50*54fd6939SJiyong Park	ldcopr	\tmp, CTR
51*54fd6939SJiyong Park	ubfx	\tmp, \tmp, #CTR_DMINLINE_SHIFT, #CTR_DMINLINE_WIDTH
52*54fd6939SJiyong Park	mov	\reg, #WORD_SIZE
53*54fd6939SJiyong Park	lsl	\reg, \reg, \tmp
54*54fd6939SJiyong Park	.endm
55*54fd6939SJiyong Park
56*54fd6939SJiyong Park	.macro	icache_line_size  reg, tmp
57*54fd6939SJiyong Park	ldcopr	\tmp, CTR
58*54fd6939SJiyong Park	and	\tmp, \tmp, #CTR_IMINLINE_MASK
59*54fd6939SJiyong Park	mov	\reg, #WORD_SIZE
60*54fd6939SJiyong Park	lsl	\reg, \reg, \tmp
61*54fd6939SJiyong Park	.endm
62*54fd6939SJiyong Park
63*54fd6939SJiyong Park	/*
64*54fd6939SJiyong Park	 * Declare the exception vector table, enforcing it is aligned on a
65*54fd6939SJiyong Park	 * 32 byte boundary.
66*54fd6939SJiyong Park	 */
67*54fd6939SJiyong Park	.macro vector_base  label
68*54fd6939SJiyong Park	.section .vectors, "ax"
69*54fd6939SJiyong Park	.align 5
70*54fd6939SJiyong Park	\label:
71*54fd6939SJiyong Park	.endm
72*54fd6939SJiyong Park
73*54fd6939SJiyong Park	/*
74*54fd6939SJiyong Park	 * This macro calculates the base address of the current CPU's multi
75*54fd6939SJiyong Park	 * processor(MP) stack using the plat_my_core_pos() index, the name of
76*54fd6939SJiyong Park	 * the stack storage and the size of each stack.
77*54fd6939SJiyong Park	 * Out: r0 = physical address of stack base
78*54fd6939SJiyong Park	 * Clobber: r14, r1, r2
79*54fd6939SJiyong Park	 */
80*54fd6939SJiyong Park	.macro get_my_mp_stack _name, _size
81*54fd6939SJiyong Park	bl	plat_my_core_pos
82*54fd6939SJiyong Park	ldr r2, =(\_name + \_size)
83*54fd6939SJiyong Park	mov r1, #\_size
84*54fd6939SJiyong Park	mla r0, r0, r1, r2
85*54fd6939SJiyong Park	.endm
86*54fd6939SJiyong Park
87*54fd6939SJiyong Park	/*
88*54fd6939SJiyong Park	 * This macro calculates the base address of a uniprocessor(UP) stack
89*54fd6939SJiyong Park	 * using the name of the stack storage and the size of the stack
90*54fd6939SJiyong Park	 * Out: r0 = physical address of stack base
91*54fd6939SJiyong Park	 */
92*54fd6939SJiyong Park	.macro get_up_stack _name, _size
93*54fd6939SJiyong Park	ldr r0, =(\_name + \_size)
94*54fd6939SJiyong Park	.endm
95*54fd6939SJiyong Park
#if ARM_ARCH_MAJOR == 7 && !defined(ARMV7_SUPPORTS_VIRTUALIZATION)
	/*
	 * Exception return with speculation hardening.
	 * ARMv7 cores without the Virtualization extension do not support
	 * eret, so "movs pc, lr" performs the return. The dsb/isb pair
	 * that follows is never architecturally executed; it only fences
	 * speculative execution past the return.
	 */
	.macro exception_return
	movs	pc, lr
	dsb	nsh
	isb
	.endm

#else
	/*
	 * Exception return with speculation hardening beyond eret. Uses
	 * the speculation barrier instruction introduced by FEAT_SB when
	 * that feature is enabled, otherwise a dsb nsh; isb sequence.
	 */
	.macro exception_return
	eret
#if ENABLE_FEAT_SB
	sb
#else
	dsb	nsh
	isb
#endif
	.endm
#endif
123*54fd6939SJiyong Park
#if (ARM_ARCH_MAJOR == 7)
	/*
	 * ARMv7 has no stl (store-release) instruction: emulate it with a
	 * plain store bracketed by explicit barriers.
	 */
	.macro stl _reg, _write_lock
	dmb
	str	\_reg, \_write_lock
	dsb
	.endm
#endif
132*54fd6939SJiyong Park
133*54fd6939SJiyong Park	/*
134*54fd6939SJiyong Park	 * Helper macro to generate the best mov/movw/movt combinations
135*54fd6939SJiyong Park	 * according to the value to be moved.
136*54fd6939SJiyong Park	 */
137*54fd6939SJiyong Park	.macro mov_imm _reg, _val
138*54fd6939SJiyong Park		.if ((\_val) & 0xffff0000) == 0
139*54fd6939SJiyong Park			mov	\_reg, #(\_val)
140*54fd6939SJiyong Park		.else
141*54fd6939SJiyong Park			movw	\_reg, #((\_val) & 0xffff)
142*54fd6939SJiyong Park			movt	\_reg, #((\_val) >> 16)
143*54fd6939SJiyong Park		.endif
144*54fd6939SJiyong Park	.endm
145*54fd6939SJiyong Park
146*54fd6939SJiyong Park	/*
147*54fd6939SJiyong Park	 * Macro to mark instances where we're jumping to a function and don't
148*54fd6939SJiyong Park	 * expect a return. To provide the function being jumped to with
149*54fd6939SJiyong Park	 * additional information, we use 'bl' instruction to jump rather than
150*54fd6939SJiyong Park	 * 'b'.
151*54fd6939SJiyong Park         *
152*54fd6939SJiyong Park	 * Debuggers infer the location of a call from where LR points to, which
153*54fd6939SJiyong Park	 * is usually the instruction after 'bl'. If this macro expansion
154*54fd6939SJiyong Park	 * happens to be the last location in a function, that'll cause the LR
155*54fd6939SJiyong Park	 * to point a location beyond the function, thereby misleading debugger
156*54fd6939SJiyong Park	 * back trace. We therefore insert a 'nop' after the function call for
157*54fd6939SJiyong Park	 * debug builds, unless 'skip_nop' parameter is non-zero.
158*54fd6939SJiyong Park	 */
159*54fd6939SJiyong Park	.macro no_ret _func:req, skip_nop=0
160*54fd6939SJiyong Park	bl	\_func
161*54fd6939SJiyong Park#if DEBUG
162*54fd6939SJiyong Park	.ifeq \skip_nop
163*54fd6939SJiyong Park	nop
164*54fd6939SJiyong Park	.endif
165*54fd6939SJiyong Park#endif
166*54fd6939SJiyong Park	.endm
167*54fd6939SJiyong Park
168*54fd6939SJiyong Park	/*
169*54fd6939SJiyong Park	 * Reserve space for a spin lock in assembly file.
170*54fd6939SJiyong Park	 */
171*54fd6939SJiyong Park	.macro define_asm_spinlock _name:req
172*54fd6939SJiyong Park	.align	SPINLOCK_ASM_ALIGN
173*54fd6939SJiyong Park	\_name:
174*54fd6939SJiyong Park	.space	SPINLOCK_ASM_SIZE
175*54fd6939SJiyong Park	.endm
176*54fd6939SJiyong Park
177*54fd6939SJiyong Park	/*
178*54fd6939SJiyong Park	 * Helper macro to OR the bottom 32 bits of `_val` into `_reg_l`
179*54fd6939SJiyong Park	 * and the top 32 bits of `_val` into `_reg_h`.  If either the bottom
180*54fd6939SJiyong Park	 * or top word of `_val` is zero, the corresponding OR operation
181*54fd6939SJiyong Park	 * is skipped.
182*54fd6939SJiyong Park	 */
183*54fd6939SJiyong Park	.macro orr64_imm _reg_l, _reg_h, _val
184*54fd6939SJiyong Park		.if (\_val >> 32)
185*54fd6939SJiyong Park			orr \_reg_h, \_reg_h, #(\_val >> 32)
186*54fd6939SJiyong Park		.endif
187*54fd6939SJiyong Park		.if (\_val & 0xffffffff)
188*54fd6939SJiyong Park			orr \_reg_l, \_reg_l, #(\_val & 0xffffffff)
189*54fd6939SJiyong Park		.endif
190*54fd6939SJiyong Park	.endm
191*54fd6939SJiyong Park
192*54fd6939SJiyong Park	/*
193*54fd6939SJiyong Park	 * Helper macro to bitwise-clear bits in `_reg_l` and
194*54fd6939SJiyong Park	 * `_reg_h` given a 64 bit immediate `_val`.  The set bits
195*54fd6939SJiyong Park	 * in the bottom word of `_val` dictate which bits from
196*54fd6939SJiyong Park	 * `_reg_l` should be cleared.  Similarly, the set bits in
197*54fd6939SJiyong Park	 * the top word of `_val` dictate which bits from `_reg_h`
198*54fd6939SJiyong Park	 * should be cleared.  If either the bottom or top word of
199*54fd6939SJiyong Park	 * `_val` is zero, the corresponding BIC operation is skipped.
200*54fd6939SJiyong Park	 */
201*54fd6939SJiyong Park	.macro bic64_imm _reg_l, _reg_h, _val
202*54fd6939SJiyong Park		.if (\_val >> 32)
203*54fd6939SJiyong Park			bic \_reg_h, \_reg_h, #(\_val >> 32)
204*54fd6939SJiyong Park		.endif
205*54fd6939SJiyong Park		.if (\_val & 0xffffffff)
206*54fd6939SJiyong Park			bic \_reg_l, \_reg_l, #(\_val & 0xffffffff)
207*54fd6939SJiyong Park		.endif
208*54fd6939SJiyong Park	.endm
209*54fd6939SJiyong Park
210*54fd6939SJiyong Park	/*
211*54fd6939SJiyong Park	 * Helper macro for carrying out division in software when
212*54fd6939SJiyong Park	 * hardware division is not suported. \top holds the dividend
213*54fd6939SJiyong Park	 * in the function call and the remainder after
214*54fd6939SJiyong Park	 * the function is executed. \bot holds the divisor. \div holds
215*54fd6939SJiyong Park	 * the quotient and \temp is a temporary registed used in calcualtion.
216*54fd6939SJiyong Park	 * The division algorithm has been obtained from:
217*54fd6939SJiyong Park	 * http://www.keil.com/support/man/docs/armasm/armasm_dom1359731155623.htm
218*54fd6939SJiyong Park	 */
219*54fd6939SJiyong Park	.macro	softudiv	div:req,top:req,bot:req,temp:req
220*54fd6939SJiyong Park
221*54fd6939SJiyong Park	mov     \temp, \bot
222*54fd6939SJiyong Park	cmp     \temp, \top, lsr #1
223*54fd6939SJiyong Parkdiv1:
224*54fd6939SJiyong Park	movls   \temp, \temp, lsl #1
225*54fd6939SJiyong Park	cmp     \temp, \top, lsr #1
226*54fd6939SJiyong Park	bls     div1
227*54fd6939SJiyong Park	mov     \div, #0
228*54fd6939SJiyong Park
229*54fd6939SJiyong Parkdiv2:
230*54fd6939SJiyong Park	cmp     \top, \temp
231*54fd6939SJiyong Park	subcs   \top, \top,\temp
232*54fd6939SJiyong Park	ADC     \div, \div, \div
233*54fd6939SJiyong Park	mov     \temp, \temp, lsr #1
234*54fd6939SJiyong Park	cmp     \temp, \bot
235*54fd6939SJiyong Park	bhs     div2
236*54fd6939SJiyong Park	.endm
237*54fd6939SJiyong Park#endif /* ASM_MACROS_S */
238