/* -----------------------------------------------------------------------
   sysv.h - Copyright (c) 2003 Jakub Jelinek <[email protected]>
	    Copyright (c) 2008 Red Hat, Inc.

   PowerPC64 Assembly glue.

   Permission is hereby granted, free of charge, to any person obtaining
   a copy of this software and associated documentation files (the
   ``Software''), to deal in the Software without restriction, including
   without limitation the rights to use, copy, modify, merge, publish,
   distribute, sublicense, and/or sell copies of the Software, and to
   permit persons to whom the Software is furnished to do so, subject to
   the following conditions:

   The above copyright notice and this permission notice shall be included
   in all copies or substantial portions of the Software.

   THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND,
   EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
   MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
   NONINFRINGEMENT.  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
   HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
   WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
   DEALINGS IN THE SOFTWARE.
   ----------------------------------------------------------------------- */

#define LIBFFI_ASM
#include <fficonfig.h>
#include <ffi.h>

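/* Register interface of ffi_call_LINUX64, as inferred from the code
   below.  The authoritative C prototype lives in ffi_linux64.c; the
   names and types used here are illustrative assumptions only:

     r3 - argument descriptor (presumably the extended cif), passed
	  through unchanged as the first argument of ffi_prep_args64
     r4 - address of the function to call (an ELFv1 function
	  descriptor, or a plain ELFv2 entry address)
     r5 - pointer to the return-value buffer
     r6 - flags word classifying the arguments and return type
     r7 - closure pointer, stashed and later reloaded into r11
     r8 - stack-frame adjustment (presumably negative) consumed by the
	  stdux below

   A rough C-level sketch under those assumptions:

     extern void ffi_call_LINUX64 (extended_cif *ecif, void (*fn) (void),
				   void *rvalue, unsigned long flags,
				   void *closure, long stack_adjust);  */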
#ifdef POWERPC64
	.hidden	ffi_call_LINUX64
	.globl	ffi_call_LINUX64
	.text
	.cfi_startproc
# if _CALL_ELF == 2
ffi_call_LINUX64:
	addis	%r2, %r12, .TOC.-ffi_call_LINUX64@ha
	addi	%r2, %r2, .TOC.-ffi_call_LINUX64@l
	.localentry ffi_call_LINUX64, . - ffi_call_LINUX64
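	/* ELFv2 dual entry: the two instructions above form the global
	   entry point, which derives the TOC pointer in r2 from the
	   function's own address in r12; .localentry marks the point
	   that TOC-sharing local callers branch to, skipping that
	   setup.  */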
# else
	.section	".opd","aw"
	.align	3
ffi_call_LINUX64:
#  ifdef _CALL_LINUX
	.quad	.L.ffi_call_LINUX64,.TOC.@tocbase,0
	.type	ffi_call_LINUX64,@function
	.text
.L.ffi_call_LINUX64:
#  else
	.hidden	.ffi_call_LINUX64
	.globl	.ffi_call_LINUX64
	.quad	.ffi_call_LINUX64,.TOC.@tocbase,0
	.size	ffi_call_LINUX64,24
	.type	.ffi_call_LINUX64,@function
	.text
.ffi_call_LINUX64:
#  endif
# endif
	mflr	%r0
	std	%r28, -32(%r1)
	std	%r29, -24(%r1)
	std	%r30, -16(%r1)
	std	%r31, -8(%r1)
	std	%r7, 8(%r1)	/* closure, stashed in the frame's CR save slot.  */
	std	%r0, 16(%r1)

	mr	%r28, %r1	/* our AP.  */
	.cfi_def_cfa_register 28
	.cfi_offset 65, 16
	.cfi_offset 31, -8
	.cfi_offset 30, -16
	.cfi_offset 29, -24
	.cfi_offset 28, -32
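	/* DWARF register 65 is the link register, so the directives
	   above describe the LR save at CFA+16 and the r28-r31 saves
	   just below the incoming stack pointer.  */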

	stdux	%r1, %r1, %r8
	mr	%r31, %r6	/* flags, */
	mr	%r30, %r5	/* rvalue, */
	mr	%r29, %r4	/* function address.  */
/* Save the TOC pointer; it is not needed for the ffi_prep_args64 call,
   but for the later bctrl function call.  */
# if _CALL_ELF == 2
	std	%r2, 24(%r1)
# else
	std	%r2, 40(%r1)
# endif
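/* The TOC save doubleword lives at a different frame offset in the two
   ABIs: 24(r1) under ELFv2, 40(r1) under ELFv1, hence the two cases
   above.  The reload after the bctrl below uses the same offsets.  */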

	/* Call ffi_prep_args64.  */
	mr	%r4, %r1
# if defined _CALL_LINUX || _CALL_ELF == 2
	bl	ffi_prep_args64
# else
	bl	.ffi_prep_args64
# endif
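	/* ffi_prep_args64 gets the incoming r3 (the argument descriptor,
	   untouched since entry) plus the fresh stack frame in r4, and
	   is expected to deposit the marshalled GPR, FP and vector
	   argument values in that frame at the offsets the loads below
	   assume (GPRs starting 32+8*8 bytes below the old r1).  */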

# if _CALL_ELF == 2
	mr	%r12, %r29
# else
	ld	%r12, 0(%r29)
	ld	%r2, 8(%r29)
# endif
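	/* Under ELFv1 the function "address" is really a descriptor:
	   doubleword 0 is the code entry point and doubleword 1 the
	   callee's TOC, loaded into r12 and r2 above.  Under ELFv2 a
	   function pointer is the entry address itself, and the ABI
	   wants it in r12 for an indirect call.  */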
	/* Now do the call.  */
	/* Set up cr0 and cr1 (mask 0xc0 selects CR fields 0 and 1) with
	   bits 0-7 of the flags; the argument loads below test bits 3,
	   5 and 6.  */
	mtcrf	0xc0, %r31

	/* Get the address to call into CTR.  */
	mtctr	%r12
	/* Load the GPR argument registers r3-r10; CR bit 5 says whether
	   r7-r10 are needed at all.  */
	addi	%r29, %r28, -32-(8*8)
	ld	%r3,  (0*8)(%r29)
	ld	%r4,  (1*8)(%r29)
	ld	%r5,  (2*8)(%r29)
	ld	%r6,  (3*8)(%r29)
	bf-	5, 1f
	ld	%r7,  (4*8)(%r29)
	ld	%r8,  (5*8)(%r29)
	ld	%r9,  (6*8)(%r29)
	ld	%r10, (7*8)(%r29)
1:

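	/* f1-f13 are the thirteen FP argument registers.  Their values
	   sit in a 14-doubleword block below the GPR area (offset 0 of
	   that block is not loaded here), hence the -(14*8) adjustment
	   and loads from offsets 1*8 through 13*8; CR bit 6 says whether
	   any FP arguments are present.  */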
	/* Load all the FP registers.  */
	bf-	6, 2f
	addi	%r29, %r29, -(14*8)
	lfd	%f1,  ( 1*8)(%r29)
	lfd	%f2,  ( 2*8)(%r29)
	lfd	%f3,  ( 3*8)(%r29)
	lfd	%f4,  ( 4*8)(%r29)
	lfd	%f5,  ( 5*8)(%r29)
	lfd	%f6,  ( 6*8)(%r29)
	lfd	%f7,  ( 7*8)(%r29)
	lfd	%f8,  ( 8*8)(%r29)
	lfd	%f9,  ( 9*8)(%r29)
	lfd	%f10, (10*8)(%r29)
	lfd	%f11, (11*8)(%r29)
	lfd	%f12, (12*8)(%r29)
	lfd	%f13, (13*8)(%r29)
2:

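	/* v2-v13 are the vector argument registers.  They are reloaded
	   from last to first, 16 bytes apart, working downward from the
	   current r29; CR bit 3 gates the whole sequence.  */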
	/* Load all the vector registers.  */
	bf-	3, 3f
	addi	%r29, %r29, -16
	lvx	%v13, 0, %r29
	addi	%r29, %r29, -16
	lvx	%v12, 0, %r29
	addi	%r29, %r29, -16
	lvx	%v11, 0, %r29
	addi	%r29, %r29, -16
	lvx	%v10, 0, %r29
	addi	%r29, %r29, -16
	lvx	%v9,  0, %r29
	addi	%r29, %r29, -16
	lvx	%v8,  0, %r29
	addi	%r29, %r29, -16
	lvx	%v7,  0, %r29
	addi	%r29, %r29, -16
	lvx	%v6,  0, %r29
	addi	%r29, %r29, -16
	lvx	%v5,  0, %r29
	addi	%r29, %r29, -16
	lvx	%v4,  0, %r29
	addi	%r29, %r29, -16
	lvx	%v3,  0, %r29
	addi	%r29, %r29, -16
	lvx	%v2,  0, %r29
3:

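	/* The closure pointer stashed at 8(r1) in the prologue is still
	   at 8(r28); it is reloaded into r11, the ABI's environment
	   register, presumably so a closure trampoline reached through
	   the target function can locate its data.  */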
	/* Make the call.  */
	ld	%r11, 8(%r28)
	bctrl

	/* This must follow the call immediately; the unwinder uses it to
	   find out whether r2 has been saved or not.  */
# if _CALL_ELF == 2
	ld	%r2, 24(%r1)
# else
	ld	%r2, 40(%r1)
# endif

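	/* The mtcrf 0x01 below copies the low four flag bits into CR
	   field 7 (CR bits 28-31); the branches then dispatch on them:
	   bit 31 for aggregate returns, bit 30 for nothing to store,
	   bit 29 for an FP result, bit 28 for a vector result, and the
	   fall-through stores r3 as an integer result.  */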
	/* Now, deal with the return value.  */
	mtcrf	0x01, %r31
	bt	31, .Lstruct_return_value
	bt	30, .Ldone_return_value
	bt	29, .Lfp_return_value
	bt	28, .Lvec_return_value
	std	%r3, 0(%r30)
	/* Fall through...  */

.Ldone_return_value:
	/* Restore the registers we used and return.  */
	mr	%r1, %r28
	.cfi_def_cfa_register 1
	ld	%r0, 16(%r28)
	ld	%r28, -32(%r28)
	mtlr	%r0
	ld	%r29, -24(%r1)
	ld	%r30, -16(%r1)
	ld	%r31, -8(%r1)
	blr

.Lvec_return_value:
	stvx	%v2, 0, %r30
	b	.Ldone_return_value

.Lfp_return_value:
	.cfi_def_cfa_register 28
	mtcrf	0x02, %r31 /* cr6  */
	bf	27, .Lfloat_return_value
	stfd	%f1, 0(%r30)
	bf	26, .Ldone_return_value
	stfd	%f2, 8(%r30)
	b	.Ldone_return_value
.Lfloat_return_value:
	stfs	%f1, 0(%r30)
	b	.Ldone_return_value

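/* Aggregate returns.  Under ELFv2 a homogeneous aggregate of up to
   eight floats or doubles comes back in f1-f8, a homogeneous aggregate
   of up to eight vectors in v2-v9, and any other aggregate of at most
   16 bytes in r3/r4; larger aggregates are returned through a hidden
   pointer and are presumably handled before reaching this code.  The
   flag bits tested below select between those cases.  */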
.Lstruct_return_value:
	bf	29, .Lvec_homog_or_small_struct
	mtcrf	0x02, %r31 /* cr6  */
	bf	27, .Lfloat_homog_return_value
	stfd	%f1, 0(%r30)
	stfd	%f2, 8(%r30)
	stfd	%f3, 16(%r30)
	stfd	%f4, 24(%r30)
	stfd	%f5, 32(%r30)
	stfd	%f6, 40(%r30)
	stfd	%f7, 48(%r30)
	stfd	%f8, 56(%r30)
	b	.Ldone_return_value

.Lfloat_homog_return_value:
	stfs	%f1, 0(%r30)
	stfs	%f2, 4(%r30)
	stfs	%f3, 8(%r30)
	stfs	%f4, 12(%r30)
	stfs	%f5, 16(%r30)
	stfs	%f6, 20(%r30)
	stfs	%f7, 24(%r30)
	stfs	%f8, 28(%r30)
	b	.Ldone_return_value

.Lvec_homog_or_small_struct:
	bf	28, .Lsmall_struct
	stvx	%v2, 0, %r30
	addi	%r30, %r30, 16
	stvx	%v3, 0, %r30
	addi	%r30, %r30, 16
	stvx	%v4, 0, %r30
	addi	%r30, %r30, 16
	stvx	%v5, 0, %r30
	addi	%r30, %r30, 16
	stvx	%v6, 0, %r30
	addi	%r30, %r30, 16
	stvx	%v7, 0, %r30
	addi	%r30, %r30, 16
	stvx	%v8, 0, %r30
	addi	%r30, %r30, 16
	stvx	%v9, 0, %r30
	b	.Ldone_return_value

.Lsmall_struct:
	std	%r3, 0(%r30)
	std	%r4, 8(%r30)
	b	.Ldone_return_value

	.cfi_endproc
# if _CALL_ELF == 2
	.size	ffi_call_LINUX64,.-ffi_call_LINUX64
# else
#  ifdef _CALL_LINUX
	.size	ffi_call_LINUX64,.-.L.ffi_call_LINUX64
#  else
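	/* What follows looks like the minimal traceback table
	   conventionally emitted after function code on 64-bit
	   PowerPC ELFv1.  */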
	.long	0
	.byte	0,12,0,1,128,4,0,0
	.size	.ffi_call_LINUX64,.-.ffi_call_LINUX64
#  endif
# endif

#endif

#if (defined __ELF__ && defined __linux__) || _CALL_ELF == 2
	.section	.note.GNU-stack,"",@progbits
#endif