/*
 * Copyright (c) 2006-2018, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2009-01-20     Bernard      first version
 * 2011-07-22     Bernard      added thumb mode porting
 * 2013-05-24     Grissiom     port to CCS
 * 2013-05-26     Grissiom     optimize for ARMv7
 * 2013-10-20     Grissiom     port to GCC
 */

15*10465441SEvalZero#include <rtconfig.h>
16*10465441SEvalZero
17*10465441SEvalZero   .text
18*10465441SEvalZero   .arm
19*10465441SEvalZero   .globl rt_thread_switch_interrupt_flag
20*10465441SEvalZero   .globl rt_interrupt_from_thread
21*10465441SEvalZero   .globl rt_interrupt_to_thread
22*10465441SEvalZero   .globl rt_interrupt_enter
23*10465441SEvalZero   .globl rt_interrupt_leave
24*10465441SEvalZero   .globl rt_hw_trap_irq
25*10465441SEvalZero
26*10465441SEvalZero/*
27*10465441SEvalZero * rt_base_t rt_hw_interrupt_disable()
28*10465441SEvalZero */
29*10465441SEvalZero    .globl rt_hw_interrupt_disable
30*10465441SEvalZerort_hw_interrupt_disable:
31*10465441SEvalZero    MRS r0, cpsr
32*10465441SEvalZero    CPSID IF
33*10465441SEvalZero    BX  lr
34*10465441SEvalZero
35*10465441SEvalZero/*
36*10465441SEvalZero * void rt_hw_interrupt_enable(rt_base_t level)
37*10465441SEvalZero */
38*10465441SEvalZero    .globl rt_hw_interrupt_enable
39*10465441SEvalZerort_hw_interrupt_enable:
40*10465441SEvalZero    MSR cpsr_c, r0
41*10465441SEvalZero    BX  lr
42*10465441SEvalZero
43*10465441SEvalZero/*
44*10465441SEvalZero * void rt_hw_context_switch(rt_uint32 from, rt_uint32 to)
45*10465441SEvalZero * r0 --> from
46*10465441SEvalZero * r1 --> to
47*10465441SEvalZero */
48*10465441SEvalZero    .globl rt_hw_context_switch
49*10465441SEvalZerort_hw_context_switch:
50*10465441SEvalZero    STMDB   sp!, {lr}           @ push pc (lr should be pushed in place of PC)
51*10465441SEvalZero    STMDB   sp!, {r0-r12, lr}   @ push lr & register file
52*10465441SEvalZero
53*10465441SEvalZero    MRS     r4, cpsr
54*10465441SEvalZero    TST     lr, #0x01
55*10465441SEvalZero    ORRNE   r4, r4, #0x20       @ it's thumb code
56*10465441SEvalZero
57*10465441SEvalZero    STMDB   sp!, {r4}           @ push cpsr
58*10465441SEvalZero
59*10465441SEvalZero#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
60*10465441SEvalZero		VMRS    r4,  fpexc
61*10465441SEvalZero        TST     r4,  #0x40000000
62*10465441SEvalZero        BEQ     __no_vfp_frame1
63*10465441SEvalZero		VSTMDB  sp!, {d0-d15}
64*10465441SEvalZero        VMRS    r5, fpscr
65*10465441SEvalZero        @ TODO: add support for Common VFPv3.
66*10465441SEvalZero        @       Save registers like FPINST, FPINST2
67*10465441SEvalZero        STMDB   sp!, {r5}
68*10465441SEvalZero__no_vfp_frame1:
69*10465441SEvalZero        STMDB   sp!, {r4}
70*10465441SEvalZero#endif
71*10465441SEvalZero
72*10465441SEvalZero    STR     sp, [r0]            @ store sp in preempted tasks TCB
73*10465441SEvalZero    LDR     sp, [r1]            @ get new task stack pointer
74*10465441SEvalZero
75*10465441SEvalZero#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
76*10465441SEvalZero        LDMIA   sp!, {r0}       @ get fpexc
77*10465441SEvalZero        VMSR    fpexc,  r0      @ restore fpexc
78*10465441SEvalZero        TST     r0,  #0x40000000
79*10465441SEvalZero        BEQ     __no_vfp_frame2
80*10465441SEvalZero        LDMIA   sp!, {r1}       @ get fpscr
81*10465441SEvalZero        VMSR    fpscr, r1
82*10465441SEvalZero		VLDMIA  sp!, {d0-d15}
83*10465441SEvalZero__no_vfp_frame2:
84*10465441SEvalZero    #endif
85*10465441SEvalZero
86*10465441SEvalZero    LDMIA   sp!, {r4}           @ pop new task cpsr to spsr
87*10465441SEvalZero    MSR     spsr_cxsf, r4
88*10465441SEvalZero
89*10465441SEvalZero    LDMIA   sp!, {r0-r12, lr, pc}^ @ pop new task r0-r12, lr & pc, copy spsr to cpsr
90*10465441SEvalZero
91*10465441SEvalZero/*
92*10465441SEvalZero * void rt_hw_context_switch_to(rt_uint32 to)
93*10465441SEvalZero * r0 --> to
94*10465441SEvalZero */
95*10465441SEvalZero    .globl rt_hw_context_switch_to
96*10465441SEvalZerort_hw_context_switch_to:
97*10465441SEvalZero    LDR     sp, [r0]            @ get new task stack pointer
98*10465441SEvalZero
99*10465441SEvalZero#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
100*10465441SEvalZero        LDMIA   sp!, {r0}       @ get fpexc
101*10465441SEvalZero        VMSR    fpexc, r0
102*10465441SEvalZero        TST     r0,  #0x40000000
103*10465441SEvalZero        BEQ     __no_vfp_frame_to
104*10465441SEvalZero        LDMIA   sp!, {r1}       @ get fpscr
105*10465441SEvalZero        VMSR    fpscr, r1
106*10465441SEvalZero		VLDMIA  sp!, {d0-d15}
107*10465441SEvalZero__no_vfp_frame_to:
108*10465441SEvalZero#endif
109*10465441SEvalZero
110*10465441SEvalZero    LDMIA   sp!, {r4}           @ pop new task cpsr to spsr
111*10465441SEvalZero    MSR     spsr_cxsf, r4
112*10465441SEvalZero
113*10465441SEvalZero    LDMIA   sp!, {r0-r12, lr, pc}^ @ pop new task r0-r12, lr & pc, copy spsr to cpsr
114*10465441SEvalZero
115*10465441SEvalZero/*
116*10465441SEvalZero * void rt_hw_context_switch_interrupt(rt_uint32 from, rt_uint32 to)@
117*10465441SEvalZero */
118*10465441SEvalZero
119*10465441SEvalZero    .globl rt_hw_context_switch_interrupt
120*10465441SEvalZerort_hw_context_switch_interrupt:
121*10465441SEvalZero    LDR r2, =rt_thread_switch_interrupt_flag
122*10465441SEvalZero    LDR r3, [r2]
123*10465441SEvalZero    CMP r3, #1
124*10465441SEvalZero    BEQ _reswitch
125*10465441SEvalZero    MOV r3, #1              @ set rt_thread_switch_interrupt_flag to 1
126*10465441SEvalZero    STR r3, [r2]
127*10465441SEvalZero    LDR r2, =rt_interrupt_from_thread  @ set rt_interrupt_from_thread
128*10465441SEvalZero
129*10465441SEvalZero    STR r0, [r2]
130*10465441SEvalZero_reswitch:
131*10465441SEvalZero    LDR r2, =rt_interrupt_to_thread       @ set rt_interrupt_to_thread
132*10465441SEvalZero    STR r1, [r2]
133*10465441SEvalZero    BX  lr
134*10465441SEvalZero
135*10465441SEvalZero    .globl IRQ_Handler
136*10465441SEvalZeroIRQ_Handler:
137*10465441SEvalZero    STMDB   sp!, {r0-r12,lr}
138*10465441SEvalZero
139*10465441SEvalZero#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
140*10465441SEvalZero		VMRS    r0,  fpexc
141*10465441SEvalZero        TST     r0,  #0x40000000
142*10465441SEvalZero        BEQ     __no_vfp_frame_str_irq
143*10465441SEvalZero		VSTMDB  sp!, {d0-d15}
144*10465441SEvalZero        VMRS    r1, fpscr
145*10465441SEvalZero        @ TODO: add support for Common VFPv3.
146*10465441SEvalZero        @       Save registers like FPINST, FPINST2
147*10465441SEvalZero        STMDB   sp!, {r1}
148*10465441SEvalZero__no_vfp_frame_str_irq:
149*10465441SEvalZero        STMDB   sp!, {r0}
150*10465441SEvalZero#endif
151*10465441SEvalZero
152*10465441SEvalZero    BL  rt_interrupt_enter
153*10465441SEvalZero    BL  rt_hw_trap_irq
154*10465441SEvalZero    BL  rt_interrupt_leave
155*10465441SEvalZero
156*10465441SEvalZero    @ if rt_thread_switch_interrupt_flag set, jump to
157*10465441SEvalZero    @ rt_hw_context_switch_interrupt_do and don't return
158*10465441SEvalZero    LDR r0, =rt_thread_switch_interrupt_flag
159*10465441SEvalZero    LDR r1, [r0]
160*10465441SEvalZero    CMP r1, #1
161*10465441SEvalZero    BEQ rt_hw_context_switch_interrupt_do
162*10465441SEvalZero
163*10465441SEvalZero#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
164*10465441SEvalZero        LDMIA   sp!, {r0}       @ get fpexc
165*10465441SEvalZero        VMSR    fpexc, r0
166*10465441SEvalZero        TST     r0,  #0x40000000
167*10465441SEvalZero        BEQ     __no_vfp_frame_ldr_irq
168*10465441SEvalZero        LDMIA   sp!, {r1}       @ get fpscr
169*10465441SEvalZero        VMSR    fpscr, r1
170*10465441SEvalZero		VLDMIA  sp!, {d0-d15}
171*10465441SEvalZero__no_vfp_frame_ldr_irq:
172*10465441SEvalZero#endif
173*10465441SEvalZero
174*10465441SEvalZero    LDMIA   sp!, {r0-r12,lr}
175*10465441SEvalZero    SUBS    pc, lr, #4
176*10465441SEvalZero
177*10465441SEvalZero/*
178*10465441SEvalZero * void rt_hw_context_switch_interrupt_do(rt_base_t flag)
179*10465441SEvalZero */
180*10465441SEvalZero    .globl rt_hw_context_switch_interrupt_do
181*10465441SEvalZerort_hw_context_switch_interrupt_do:
182*10465441SEvalZero    MOV     r1,  #0           @ clear flag
183*10465441SEvalZero    STR     r1,  [r0]
184*10465441SEvalZero
185*10465441SEvalZero#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
186*10465441SEvalZero        LDMIA   sp!, {r0}       @ get fpexc
187*10465441SEvalZero        VMSR    fpexc, r0
188*10465441SEvalZero        TST     r0,  #0x40000000
189*10465441SEvalZero        BEQ     __no_vfp_frame_do1
190*10465441SEvalZero        LDMIA   sp!, {r1}       @ get fpscr
191*10465441SEvalZero        VMSR    fpscr, r1
192*10465441SEvalZero		VLDMIA  sp!, {d0-d15}
193*10465441SEvalZero__no_vfp_frame_do1:
194*10465441SEvalZero#endif
195*10465441SEvalZero
196*10465441SEvalZero    LDMIA   sp!, {r0-r12,lr}  @ reload saved registers
197*10465441SEvalZero    STMDB   sp, {r0-r3}       @ save r0-r3. We will restore r0-r3 in the SVC
198*10465441SEvalZero                              @ mode so there is no need to update SP.
199*10465441SEvalZero    SUB     r1,  sp, #16      @ save the right SP value in r1, so we could restore r0-r3.
200*10465441SEvalZero    SUB     r2,  lr, #4       @ save old task's pc to r2
201*10465441SEvalZero
202*10465441SEvalZero    MRS     r3,  spsr         @ get cpsr of interrupt thread
203*10465441SEvalZero
204*10465441SEvalZero    @ switch to SVC mode and no interrupt
205*10465441SEvalZero    CPSID   IF, #0x13
206*10465441SEvalZero
207*10465441SEvalZero    STMDB   sp!, {r2}         @ push old task's pc
208*10465441SEvalZero    STMDB   sp!, {r4-r12,lr}  @ push old task's lr,r12-r4
209*10465441SEvalZero    LDMIA   r1!, {r4-r7}      @ restore r0-r3 of the interrupted thread
210*10465441SEvalZero    STMDB   sp!, {r4-r7}      @ push old task's r3-r0. We don't need to push/pop them to
211*10465441SEvalZero                              @ r0-r3 because we just want to transfer the data and don't
212*10465441SEvalZero                              @ use them here.
213*10465441SEvalZero    STMDB   sp!, {r3}         @ push old task's cpsr
214*10465441SEvalZero
215*10465441SEvalZero#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
216*10465441SEvalZero		VMRS    r0,  fpexc
217*10465441SEvalZero        TST     r0,  #0x40000000
218*10465441SEvalZero        BEQ     __no_vfp_frame_do2
219*10465441SEvalZero        VSTMDB  sp!, {d0-d15}
220*10465441SEvalZero        VMRS    r1, fpscr
221*10465441SEvalZero        @ TODO: add support for Common VFPv3.
222*10465441SEvalZero        @       Save registers like FPINST, FPINST2
223*10465441SEvalZero        STMDB   sp!, {r1}
224*10465441SEvalZero__no_vfp_frame_do2:
225*10465441SEvalZero        STMDB   sp!, {r0}
226*10465441SEvalZero#endif
227*10465441SEvalZero
228*10465441SEvalZero    LDR     r4,  =rt_interrupt_from_thread
229*10465441SEvalZero    LDR     r5,  [r4]
230*10465441SEvalZero    STR     sp,  [r5]         @ store sp in preempted tasks's TCB
231*10465441SEvalZero
232*10465441SEvalZero    LDR     r6,  =rt_interrupt_to_thread
233*10465441SEvalZero    LDR     r6,  [r6]
234*10465441SEvalZero    LDR     sp,  [r6]         @ get new task's stack pointer
235*10465441SEvalZero
236*10465441SEvalZero#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
237*10465441SEvalZero        LDMIA   sp!, {r0}       @ get fpexc
238*10465441SEvalZero        VMSR    fpexc, r0
239*10465441SEvalZero        TST     r0,  #0x40000000
240*10465441SEvalZero        BEQ     __no_vfp_frame_do3
241*10465441SEvalZero        LDMIA   sp!, {r1}       @ get fpscr
242*10465441SEvalZero        VMSR    fpscr, r1
243*10465441SEvalZero		VLDMIA  sp!, {d0-d15}
244*10465441SEvalZero__no_vfp_frame_do3:
245*10465441SEvalZero#endif
246*10465441SEvalZero
247*10465441SEvalZero    LDMIA   sp!, {r4}         @ pop new task's cpsr to spsr
248*10465441SEvalZero    MSR     spsr_cxsf, r4
249*10465441SEvalZero
250*10465441SEvalZero    LDMIA   sp!, {r0-r12,lr,pc}^ @ pop new task's r0-r12,lr & pc, copy spsr to cpsr
251*10465441SEvalZero
252