1*1fd5a2e1SPrashanth Swaminathan/* ----------------------------------------------------------------------- 2*1fd5a2e1SPrashanth Swaminathan sysv.S - Copyright (c) 2017 Anthony Green 3*1fd5a2e1SPrashanth Swaminathan - Copyright (c) 2013 The Written Word, Inc. 4*1fd5a2e1SPrashanth Swaminathan - Copyright (c) 1996,1998,2001-2003,2005,2008,2010 Red Hat, Inc. 5*1fd5a2e1SPrashanth Swaminathan 6*1fd5a2e1SPrashanth Swaminathan X86 Foreign Function Interface 7*1fd5a2e1SPrashanth Swaminathan 8*1fd5a2e1SPrashanth Swaminathan Permission is hereby granted, free of charge, to any person obtaining 9*1fd5a2e1SPrashanth Swaminathan a copy of this software and associated documentation files (the 10*1fd5a2e1SPrashanth Swaminathan ``Software''), to deal in the Software without restriction, including 11*1fd5a2e1SPrashanth Swaminathan without limitation the rights to use, copy, modify, merge, publish, 12*1fd5a2e1SPrashanth Swaminathan distribute, sublicense, and/or sell copies of the Software, and to 13*1fd5a2e1SPrashanth Swaminathan permit persons to whom the Software is furnished to do so, subject to 14*1fd5a2e1SPrashanth Swaminathan the following conditions: 15*1fd5a2e1SPrashanth Swaminathan 16*1fd5a2e1SPrashanth Swaminathan The above copyright notice and this permission notice shall be included 17*1fd5a2e1SPrashanth Swaminathan in all copies or substantial portions of the Software. 18*1fd5a2e1SPrashanth Swaminathan 19*1fd5a2e1SPrashanth Swaminathan THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND, 20*1fd5a2e1SPrashanth Swaminathan EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 21*1fd5a2e1SPrashanth Swaminathan MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 22*1fd5a2e1SPrashanth Swaminathan NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 23*1fd5a2e1SPrashanth Swaminathan HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 24*1fd5a2e1SPrashanth Swaminathan WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 25*1fd5a2e1SPrashanth Swaminathan OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 26*1fd5a2e1SPrashanth Swaminathan DEALINGS IN THE SOFTWARE. 27*1fd5a2e1SPrashanth Swaminathan ----------------------------------------------------------------------- */ 28*1fd5a2e1SPrashanth Swaminathan 29*1fd5a2e1SPrashanth Swaminathan#ifdef __i386__ 30*1fd5a2e1SPrashanth Swaminathan#ifndef _MSC_VER 31*1fd5a2e1SPrashanth Swaminathan 32*1fd5a2e1SPrashanth Swaminathan#define LIBFFI_ASM 33*1fd5a2e1SPrashanth Swaminathan#include <fficonfig.h> 34*1fd5a2e1SPrashanth Swaminathan#include <ffi.h> 35*1fd5a2e1SPrashanth Swaminathan#include "internal.h" 36*1fd5a2e1SPrashanth Swaminathan 37*1fd5a2e1SPrashanth Swaminathan#define C2(X, Y) X ## Y 38*1fd5a2e1SPrashanth Swaminathan#define C1(X, Y) C2(X, Y) 39*1fd5a2e1SPrashanth Swaminathan#ifdef __USER_LABEL_PREFIX__ 40*1fd5a2e1SPrashanth Swaminathan# define C(X) C1(__USER_LABEL_PREFIX__, X) 41*1fd5a2e1SPrashanth Swaminathan#else 42*1fd5a2e1SPrashanth Swaminathan# define C(X) X 43*1fd5a2e1SPrashanth Swaminathan#endif 44*1fd5a2e1SPrashanth Swaminathan 45*1fd5a2e1SPrashanth Swaminathan#ifdef X86_DARWIN 46*1fd5a2e1SPrashanth Swaminathan# define L(X) C1(L, X) 47*1fd5a2e1SPrashanth Swaminathan#else 48*1fd5a2e1SPrashanth Swaminathan# define L(X) C1(.L, X) 49*1fd5a2e1SPrashanth Swaminathan#endif 50*1fd5a2e1SPrashanth Swaminathan 51*1fd5a2e1SPrashanth Swaminathan#ifdef __ELF__ 52*1fd5a2e1SPrashanth Swaminathan# define ENDF(X) .type X,@function; .size X, . - X 53*1fd5a2e1SPrashanth Swaminathan#else 54*1fd5a2e1SPrashanth Swaminathan# define ENDF(X) 55*1fd5a2e1SPrashanth Swaminathan#endif 56*1fd5a2e1SPrashanth Swaminathan 57*1fd5a2e1SPrashanth Swaminathan/* Handle win32 fastcall name mangling. */ 58*1fd5a2e1SPrashanth Swaminathan#ifdef X86_WIN32 59*1fd5a2e1SPrashanth Swaminathan# define ffi_call_i386 @ffi_call_i386@8 60*1fd5a2e1SPrashanth Swaminathan# define ffi_closure_inner @ffi_closure_inner@8 61*1fd5a2e1SPrashanth Swaminathan#else 62*1fd5a2e1SPrashanth Swaminathan# define ffi_call_i386 C(ffi_call_i386) 63*1fd5a2e1SPrashanth Swaminathan# define ffi_closure_inner C(ffi_closure_inner) 64*1fd5a2e1SPrashanth Swaminathan#endif 65*1fd5a2e1SPrashanth Swaminathan 66*1fd5a2e1SPrashanth Swaminathan/* This macro allows the safe creation of jump tables without an 67*1fd5a2e1SPrashanth Swaminathan actual table. The entry points into the table are all 8 bytes. 68*1fd5a2e1SPrashanth Swaminathan The use of ORG asserts that we're at the correct location. */ 69*1fd5a2e1SPrashanth Swaminathan/* ??? The clang assembler doesn't handle .org with symbolic expressions. */ 70*1fd5a2e1SPrashanth Swaminathan#if defined(__clang__) || defined(__APPLE__) || (defined (__sun__) && defined(__svr4__)) 71*1fd5a2e1SPrashanth Swaminathan# define E(BASE, X) .balign 8 72*1fd5a2e1SPrashanth Swaminathan#else 73*1fd5a2e1SPrashanth Swaminathan# define E(BASE, X) .balign 8; .org BASE + X * 8 74*1fd5a2e1SPrashanth Swaminathan#endif 75*1fd5a2e1SPrashanth Swaminathan 76*1fd5a2e1SPrashanth Swaminathan .text 77*1fd5a2e1SPrashanth Swaminathan .balign 16 78*1fd5a2e1SPrashanth Swaminathan .globl ffi_call_i386 79*1fd5a2e1SPrashanth Swaminathan FFI_HIDDEN(ffi_call_i386) 80*1fd5a2e1SPrashanth Swaminathan 81*1fd5a2e1SPrashanth Swaminathan/* This is declared as 82*1fd5a2e1SPrashanth Swaminathan 83*1fd5a2e1SPrashanth Swaminathan void ffi_call_i386(struct call_frame *frame, char *argp) 84*1fd5a2e1SPrashanth Swaminathan __attribute__((fastcall)); 85*1fd5a2e1SPrashanth Swaminathan 86*1fd5a2e1SPrashanth Swaminathan Thus the arguments are present in 87*1fd5a2e1SPrashanth Swaminathan 88*1fd5a2e1SPrashanth Swaminathan ecx: frame 89*1fd5a2e1SPrashanth Swaminathan edx: argp 90*1fd5a2e1SPrashanth Swaminathan*/ 91*1fd5a2e1SPrashanth Swaminathan 92*1fd5a2e1SPrashanth Swaminathanffi_call_i386: 93*1fd5a2e1SPrashanth SwaminathanL(UW0): 94*1fd5a2e1SPrashanth Swaminathan # cfi_startproc 95*1fd5a2e1SPrashanth Swaminathan#if !HAVE_FASTCALL 96*1fd5a2e1SPrashanth Swaminathan movl 4(%esp), %ecx 97*1fd5a2e1SPrashanth Swaminathan movl 8(%esp), %edx 98*1fd5a2e1SPrashanth Swaminathan#endif 99*1fd5a2e1SPrashanth Swaminathan movl (%esp), %eax /* move the return address */ 100*1fd5a2e1SPrashanth Swaminathan movl %ebp, (%ecx) /* store %ebp into local frame */ 101*1fd5a2e1SPrashanth Swaminathan movl %eax, 4(%ecx) /* store retaddr into local frame */ 102*1fd5a2e1SPrashanth Swaminathan 103*1fd5a2e1SPrashanth Swaminathan /* New stack frame based off ebp. This is a itty bit of unwind 104*1fd5a2e1SPrashanth Swaminathan trickery in that the CFA *has* changed. There is no easy way 105*1fd5a2e1SPrashanth Swaminathan to describe it correctly on entry to the function. Fortunately, 106*1fd5a2e1SPrashanth Swaminathan it doesn't matter too much since at all points we can correctly 107*1fd5a2e1SPrashanth Swaminathan unwind back to ffi_call. Note that the location to which we 108*1fd5a2e1SPrashanth Swaminathan moved the return address is (the new) CFA-4, so from the 109*1fd5a2e1SPrashanth Swaminathan perspective of the unwind info, it hasn't moved. */ 110*1fd5a2e1SPrashanth Swaminathan movl %ecx, %ebp 111*1fd5a2e1SPrashanth SwaminathanL(UW1): 112*1fd5a2e1SPrashanth Swaminathan # cfi_def_cfa(%ebp, 8) 113*1fd5a2e1SPrashanth Swaminathan # cfi_rel_offset(%ebp, 0) 114*1fd5a2e1SPrashanth Swaminathan 115*1fd5a2e1SPrashanth Swaminathan movl %edx, %esp /* set outgoing argument stack */ 116*1fd5a2e1SPrashanth Swaminathan movl 20+R_EAX*4(%ebp), %eax /* set register arguments */ 117*1fd5a2e1SPrashanth Swaminathan movl 20+R_EDX*4(%ebp), %edx 118*1fd5a2e1SPrashanth Swaminathan movl 20+R_ECX*4(%ebp), %ecx 119*1fd5a2e1SPrashanth Swaminathan 120*1fd5a2e1SPrashanth Swaminathan call *8(%ebp) 121*1fd5a2e1SPrashanth Swaminathan 122*1fd5a2e1SPrashanth Swaminathan movl 12(%ebp), %ecx /* load return type code */ 123*1fd5a2e1SPrashanth Swaminathan movl %ebx, 8(%ebp) /* preserve %ebx */ 124*1fd5a2e1SPrashanth SwaminathanL(UW2): 125*1fd5a2e1SPrashanth Swaminathan # cfi_rel_offset(%ebx, 8) 126*1fd5a2e1SPrashanth Swaminathan 127*1fd5a2e1SPrashanth Swaminathan andl $X86_RET_TYPE_MASK, %ecx 128*1fd5a2e1SPrashanth Swaminathan#ifdef __PIC__ 129*1fd5a2e1SPrashanth Swaminathan call C(__x86.get_pc_thunk.bx) 130*1fd5a2e1SPrashanth SwaminathanL(pc1): 131*1fd5a2e1SPrashanth Swaminathan leal L(store_table)-L(pc1)(%ebx, %ecx, 8), %ebx 132*1fd5a2e1SPrashanth Swaminathan#else 133*1fd5a2e1SPrashanth Swaminathan leal L(store_table)(,%ecx, 8), %ebx 134*1fd5a2e1SPrashanth Swaminathan#endif 135*1fd5a2e1SPrashanth Swaminathan movl 16(%ebp), %ecx /* load result address */ 136*1fd5a2e1SPrashanth Swaminathan jmp *%ebx 137*1fd5a2e1SPrashanth Swaminathan 138*1fd5a2e1SPrashanth Swaminathan .balign 8 139*1fd5a2e1SPrashanth SwaminathanL(store_table): 140*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_FLOAT) 141*1fd5a2e1SPrashanth Swaminathan fstps (%ecx) 142*1fd5a2e1SPrashanth Swaminathan jmp L(e1) 143*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_DOUBLE) 144*1fd5a2e1SPrashanth Swaminathan fstpl (%ecx) 145*1fd5a2e1SPrashanth Swaminathan jmp L(e1) 146*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_LDOUBLE) 147*1fd5a2e1SPrashanth Swaminathan fstpt (%ecx) 148*1fd5a2e1SPrashanth Swaminathan jmp L(e1) 149*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_SINT8) 150*1fd5a2e1SPrashanth Swaminathan movsbl %al, %eax 151*1fd5a2e1SPrashanth Swaminathan mov %eax, (%ecx) 152*1fd5a2e1SPrashanth Swaminathan jmp L(e1) 153*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_SINT16) 154*1fd5a2e1SPrashanth Swaminathan movswl %ax, %eax 155*1fd5a2e1SPrashanth Swaminathan mov %eax, (%ecx) 156*1fd5a2e1SPrashanth Swaminathan jmp L(e1) 157*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_UINT8) 158*1fd5a2e1SPrashanth Swaminathan movzbl %al, %eax 159*1fd5a2e1SPrashanth Swaminathan mov %eax, (%ecx) 160*1fd5a2e1SPrashanth Swaminathan jmp L(e1) 161*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_UINT16) 162*1fd5a2e1SPrashanth Swaminathan movzwl %ax, %eax 163*1fd5a2e1SPrashanth Swaminathan mov %eax, (%ecx) 164*1fd5a2e1SPrashanth Swaminathan jmp L(e1) 165*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_INT64) 166*1fd5a2e1SPrashanth Swaminathan movl %edx, 4(%ecx) 167*1fd5a2e1SPrashanth Swaminathan /* fallthru */ 168*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_INT32) 169*1fd5a2e1SPrashanth Swaminathan movl %eax, (%ecx) 170*1fd5a2e1SPrashanth Swaminathan /* fallthru */ 171*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_VOID) 172*1fd5a2e1SPrashanth SwaminathanL(e1): 173*1fd5a2e1SPrashanth Swaminathan movl 8(%ebp), %ebx 174*1fd5a2e1SPrashanth Swaminathan movl %ebp, %esp 175*1fd5a2e1SPrashanth Swaminathan popl %ebp 176*1fd5a2e1SPrashanth SwaminathanL(UW3): 177*1fd5a2e1SPrashanth Swaminathan # cfi_remember_state 178*1fd5a2e1SPrashanth Swaminathan # cfi_def_cfa(%esp, 4) 179*1fd5a2e1SPrashanth Swaminathan # cfi_restore(%ebx) 180*1fd5a2e1SPrashanth Swaminathan # cfi_restore(%ebp) 181*1fd5a2e1SPrashanth Swaminathan ret 182*1fd5a2e1SPrashanth SwaminathanL(UW4): 183*1fd5a2e1SPrashanth Swaminathan # cfi_restore_state 184*1fd5a2e1SPrashanth Swaminathan 185*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_STRUCTPOP) 186*1fd5a2e1SPrashanth Swaminathan jmp L(e1) 187*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_STRUCTARG) 188*1fd5a2e1SPrashanth Swaminathan jmp L(e1) 189*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_STRUCT_1B) 190*1fd5a2e1SPrashanth Swaminathan movb %al, (%ecx) 191*1fd5a2e1SPrashanth Swaminathan jmp L(e1) 192*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_STRUCT_2B) 193*1fd5a2e1SPrashanth Swaminathan movw %ax, (%ecx) 194*1fd5a2e1SPrashanth Swaminathan jmp L(e1) 195*1fd5a2e1SPrashanth Swaminathan 196*1fd5a2e1SPrashanth Swaminathan /* Fill out the table so that bad values are predictable. */ 197*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_UNUSED14) 198*1fd5a2e1SPrashanth Swaminathan ud2 199*1fd5a2e1SPrashanth SwaminathanE(L(store_table), X86_RET_UNUSED15) 200*1fd5a2e1SPrashanth Swaminathan ud2 201*1fd5a2e1SPrashanth Swaminathan 202*1fd5a2e1SPrashanth SwaminathanL(UW5): 203*1fd5a2e1SPrashanth Swaminathan # cfi_endproc 204*1fd5a2e1SPrashanth SwaminathanENDF(ffi_call_i386) 205*1fd5a2e1SPrashanth Swaminathan 206*1fd5a2e1SPrashanth Swaminathan/* The inner helper is declared as 207*1fd5a2e1SPrashanth Swaminathan 208*1fd5a2e1SPrashanth Swaminathan void ffi_closure_inner(struct closure_frame *frame, char *argp) 209*1fd5a2e1SPrashanth Swaminathan __attribute_((fastcall)) 210*1fd5a2e1SPrashanth Swaminathan 211*1fd5a2e1SPrashanth Swaminathan Thus the arguments are placed in 212*1fd5a2e1SPrashanth Swaminathan 213*1fd5a2e1SPrashanth Swaminathan ecx: frame 214*1fd5a2e1SPrashanth Swaminathan edx: argp 215*1fd5a2e1SPrashanth Swaminathan*/ 216*1fd5a2e1SPrashanth Swaminathan 217*1fd5a2e1SPrashanth Swaminathan/* Macros to help setting up the closure_data structure. */ 218*1fd5a2e1SPrashanth Swaminathan 219*1fd5a2e1SPrashanth Swaminathan#if HAVE_FASTCALL 220*1fd5a2e1SPrashanth Swaminathan# define closure_FS (40 + 4) 221*1fd5a2e1SPrashanth Swaminathan# define closure_CF 0 222*1fd5a2e1SPrashanth Swaminathan#else 223*1fd5a2e1SPrashanth Swaminathan# define closure_FS (8 + 40 + 12) 224*1fd5a2e1SPrashanth Swaminathan# define closure_CF 8 225*1fd5a2e1SPrashanth Swaminathan#endif 226*1fd5a2e1SPrashanth Swaminathan 227*1fd5a2e1SPrashanth Swaminathan#define FFI_CLOSURE_SAVE_REGS \ 228*1fd5a2e1SPrashanth Swaminathan movl %eax, closure_CF+16+R_EAX*4(%esp); \ 229*1fd5a2e1SPrashanth Swaminathan movl %edx, closure_CF+16+R_EDX*4(%esp); \ 230*1fd5a2e1SPrashanth Swaminathan movl %ecx, closure_CF+16+R_ECX*4(%esp) 231*1fd5a2e1SPrashanth Swaminathan 232*1fd5a2e1SPrashanth Swaminathan#define FFI_CLOSURE_COPY_TRAMP_DATA \ 233*1fd5a2e1SPrashanth Swaminathan movl FFI_TRAMPOLINE_SIZE(%eax), %edx; /* copy cif */ \ 234*1fd5a2e1SPrashanth Swaminathan movl FFI_TRAMPOLINE_SIZE+4(%eax), %ecx; /* copy fun */ \ 235*1fd5a2e1SPrashanth Swaminathan movl FFI_TRAMPOLINE_SIZE+8(%eax), %eax; /* copy user_data */ \ 236*1fd5a2e1SPrashanth Swaminathan movl %edx, closure_CF+28(%esp); \ 237*1fd5a2e1SPrashanth Swaminathan movl %ecx, closure_CF+32(%esp); \ 238*1fd5a2e1SPrashanth Swaminathan movl %eax, closure_CF+36(%esp) 239*1fd5a2e1SPrashanth Swaminathan 240*1fd5a2e1SPrashanth Swaminathan#if HAVE_FASTCALL 241*1fd5a2e1SPrashanth Swaminathan# define FFI_CLOSURE_PREP_CALL \ 242*1fd5a2e1SPrashanth Swaminathan movl %esp, %ecx; /* load closure_data */ \ 243*1fd5a2e1SPrashanth Swaminathan leal closure_FS+4(%esp), %edx; /* load incoming stack */ 244*1fd5a2e1SPrashanth Swaminathan#else 245*1fd5a2e1SPrashanth Swaminathan# define FFI_CLOSURE_PREP_CALL \ 246*1fd5a2e1SPrashanth Swaminathan leal closure_CF(%esp), %ecx; /* load closure_data */ \ 247*1fd5a2e1SPrashanth Swaminathan leal closure_FS+4(%esp), %edx; /* load incoming stack */ \ 248*1fd5a2e1SPrashanth Swaminathan movl %ecx, (%esp); \ 249*1fd5a2e1SPrashanth Swaminathan movl %edx, 4(%esp) 250*1fd5a2e1SPrashanth Swaminathan#endif 251*1fd5a2e1SPrashanth Swaminathan 252*1fd5a2e1SPrashanth Swaminathan#define FFI_CLOSURE_CALL_INNER(UWN) \ 253*1fd5a2e1SPrashanth Swaminathan call ffi_closure_inner 254*1fd5a2e1SPrashanth Swaminathan 255*1fd5a2e1SPrashanth Swaminathan#define FFI_CLOSURE_MASK_AND_JUMP(N, UW) \ 256*1fd5a2e1SPrashanth Swaminathan andl $X86_RET_TYPE_MASK, %eax; \ 257*1fd5a2e1SPrashanth Swaminathan leal L(C1(load_table,N))(, %eax, 8), %edx; \ 258*1fd5a2e1SPrashanth Swaminathan movl closure_CF(%esp), %eax; /* optimiztic load */ \ 259*1fd5a2e1SPrashanth Swaminathan jmp *%edx 260*1fd5a2e1SPrashanth Swaminathan 261*1fd5a2e1SPrashanth Swaminathan#ifdef __PIC__ 262*1fd5a2e1SPrashanth Swaminathan# if defined X86_DARWIN || defined HAVE_HIDDEN_VISIBILITY_ATTRIBUTE 263*1fd5a2e1SPrashanth Swaminathan# undef FFI_CLOSURE_MASK_AND_JUMP 264*1fd5a2e1SPrashanth Swaminathan# define FFI_CLOSURE_MASK_AND_JUMP(N, UW) \ 265*1fd5a2e1SPrashanth Swaminathan andl $X86_RET_TYPE_MASK, %eax; \ 266*1fd5a2e1SPrashanth Swaminathan call C(__x86.get_pc_thunk.dx); \ 267*1fd5a2e1SPrashanth SwaminathanL(C1(pc,N)): \ 268*1fd5a2e1SPrashanth Swaminathan leal L(C1(load_table,N))-L(C1(pc,N))(%edx, %eax, 8), %edx; \ 269*1fd5a2e1SPrashanth Swaminathan movl closure_CF(%esp), %eax; /* optimiztic load */ \ 270*1fd5a2e1SPrashanth Swaminathan jmp *%edx 271*1fd5a2e1SPrashanth Swaminathan# else 272*1fd5a2e1SPrashanth Swaminathan# define FFI_CLOSURE_CALL_INNER_SAVE_EBX 273*1fd5a2e1SPrashanth Swaminathan# undef FFI_CLOSURE_CALL_INNER 274*1fd5a2e1SPrashanth Swaminathan# define FFI_CLOSURE_CALL_INNER(UWN) \ 275*1fd5a2e1SPrashanth Swaminathan movl %ebx, 40(%esp); /* save ebx */ \ 276*1fd5a2e1SPrashanth SwaminathanL(C1(UW,UWN)): \ 277*1fd5a2e1SPrashanth Swaminathan /* cfi_rel_offset(%ebx, 40); */ \ 278*1fd5a2e1SPrashanth Swaminathan call C(__x86.get_pc_thunk.bx); /* load got register */ \ 279*1fd5a2e1SPrashanth Swaminathan addl $C(_GLOBAL_OFFSET_TABLE_), %ebx; \ 280*1fd5a2e1SPrashanth Swaminathan call ffi_closure_inner@PLT 281*1fd5a2e1SPrashanth Swaminathan# undef FFI_CLOSURE_MASK_AND_JUMP 282*1fd5a2e1SPrashanth Swaminathan# define FFI_CLOSURE_MASK_AND_JUMP(N, UWN) \ 283*1fd5a2e1SPrashanth Swaminathan andl $X86_RET_TYPE_MASK, %eax; \ 284*1fd5a2e1SPrashanth Swaminathan leal L(C1(load_table,N))@GOTOFF(%ebx, %eax, 8), %edx; \ 285*1fd5a2e1SPrashanth Swaminathan movl 40(%esp), %ebx; /* restore ebx */ \ 286*1fd5a2e1SPrashanth SwaminathanL(C1(UW,UWN)): \ 287*1fd5a2e1SPrashanth Swaminathan /* cfi_restore(%ebx); */ \ 288*1fd5a2e1SPrashanth Swaminathan movl closure_CF(%esp), %eax; /* optimiztic load */ \ 289*1fd5a2e1SPrashanth Swaminathan jmp *%edx 290*1fd5a2e1SPrashanth Swaminathan# endif /* DARWIN || HIDDEN */ 291*1fd5a2e1SPrashanth Swaminathan#endif /* __PIC__ */ 292*1fd5a2e1SPrashanth Swaminathan 293*1fd5a2e1SPrashanth Swaminathan .balign 16 294*1fd5a2e1SPrashanth Swaminathan .globl C(ffi_go_closure_EAX) 295*1fd5a2e1SPrashanth Swaminathan FFI_HIDDEN(C(ffi_go_closure_EAX)) 296*1fd5a2e1SPrashanth SwaminathanC(ffi_go_closure_EAX): 297*1fd5a2e1SPrashanth SwaminathanL(UW6): 298*1fd5a2e1SPrashanth Swaminathan # cfi_startproc 299*1fd5a2e1SPrashanth Swaminathan subl $closure_FS, %esp 300*1fd5a2e1SPrashanth SwaminathanL(UW7): 301*1fd5a2e1SPrashanth Swaminathan # cfi_def_cfa_offset(closure_FS + 4) 302*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_SAVE_REGS 303*1fd5a2e1SPrashanth Swaminathan movl 4(%eax), %edx /* copy cif */ 304*1fd5a2e1SPrashanth Swaminathan movl 8(%eax), %ecx /* copy fun */ 305*1fd5a2e1SPrashanth Swaminathan movl %edx, closure_CF+28(%esp) 306*1fd5a2e1SPrashanth Swaminathan movl %ecx, closure_CF+32(%esp) 307*1fd5a2e1SPrashanth Swaminathan movl %eax, closure_CF+36(%esp) /* closure is user_data */ 308*1fd5a2e1SPrashanth Swaminathan jmp L(do_closure_i386) 309*1fd5a2e1SPrashanth SwaminathanL(UW8): 310*1fd5a2e1SPrashanth Swaminathan # cfi_endproc 311*1fd5a2e1SPrashanth SwaminathanENDF(C(ffi_go_closure_EAX)) 312*1fd5a2e1SPrashanth Swaminathan 313*1fd5a2e1SPrashanth Swaminathan .balign 16 314*1fd5a2e1SPrashanth Swaminathan .globl C(ffi_go_closure_ECX) 315*1fd5a2e1SPrashanth Swaminathan FFI_HIDDEN(C(ffi_go_closure_ECX)) 316*1fd5a2e1SPrashanth SwaminathanC(ffi_go_closure_ECX): 317*1fd5a2e1SPrashanth SwaminathanL(UW9): 318*1fd5a2e1SPrashanth Swaminathan # cfi_startproc 319*1fd5a2e1SPrashanth Swaminathan subl $closure_FS, %esp 320*1fd5a2e1SPrashanth SwaminathanL(UW10): 321*1fd5a2e1SPrashanth Swaminathan # cfi_def_cfa_offset(closure_FS + 4) 322*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_SAVE_REGS 323*1fd5a2e1SPrashanth Swaminathan movl 4(%ecx), %edx /* copy cif */ 324*1fd5a2e1SPrashanth Swaminathan movl 8(%ecx), %eax /* copy fun */ 325*1fd5a2e1SPrashanth Swaminathan movl %edx, closure_CF+28(%esp) 326*1fd5a2e1SPrashanth Swaminathan movl %eax, closure_CF+32(%esp) 327*1fd5a2e1SPrashanth Swaminathan movl %ecx, closure_CF+36(%esp) /* closure is user_data */ 328*1fd5a2e1SPrashanth Swaminathan jmp L(do_closure_i386) 329*1fd5a2e1SPrashanth SwaminathanL(UW11): 330*1fd5a2e1SPrashanth Swaminathan # cfi_endproc 331*1fd5a2e1SPrashanth SwaminathanENDF(C(ffi_go_closure_ECX)) 332*1fd5a2e1SPrashanth Swaminathan 333*1fd5a2e1SPrashanth Swaminathan/* The closure entry points are reached from the ffi_closure trampoline. 334*1fd5a2e1SPrashanth Swaminathan On entry, %eax contains the address of the ffi_closure. */ 335*1fd5a2e1SPrashanth Swaminathan 336*1fd5a2e1SPrashanth Swaminathan .balign 16 337*1fd5a2e1SPrashanth Swaminathan .globl C(ffi_closure_i386) 338*1fd5a2e1SPrashanth Swaminathan FFI_HIDDEN(C(ffi_closure_i386)) 339*1fd5a2e1SPrashanth Swaminathan 340*1fd5a2e1SPrashanth SwaminathanC(ffi_closure_i386): 341*1fd5a2e1SPrashanth SwaminathanL(UW12): 342*1fd5a2e1SPrashanth Swaminathan # cfi_startproc 343*1fd5a2e1SPrashanth Swaminathan subl $closure_FS, %esp 344*1fd5a2e1SPrashanth SwaminathanL(UW13): 345*1fd5a2e1SPrashanth Swaminathan # cfi_def_cfa_offset(closure_FS + 4) 346*1fd5a2e1SPrashanth Swaminathan 347*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_SAVE_REGS 348*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_COPY_TRAMP_DATA 349*1fd5a2e1SPrashanth Swaminathan 350*1fd5a2e1SPrashanth Swaminathan /* Entry point from preceeding Go closures. */ 351*1fd5a2e1SPrashanth SwaminathanL(do_closure_i386): 352*1fd5a2e1SPrashanth Swaminathan 353*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_PREP_CALL 354*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_CALL_INNER(14) 355*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_MASK_AND_JUMP(2, 15) 356*1fd5a2e1SPrashanth Swaminathan 357*1fd5a2e1SPrashanth Swaminathan .balign 8 358*1fd5a2e1SPrashanth SwaminathanL(load_table2): 359*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_FLOAT) 360*1fd5a2e1SPrashanth Swaminathan flds closure_CF(%esp) 361*1fd5a2e1SPrashanth Swaminathan jmp L(e2) 362*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_DOUBLE) 363*1fd5a2e1SPrashanth Swaminathan fldl closure_CF(%esp) 364*1fd5a2e1SPrashanth Swaminathan jmp L(e2) 365*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_LDOUBLE) 366*1fd5a2e1SPrashanth Swaminathan fldt closure_CF(%esp) 367*1fd5a2e1SPrashanth Swaminathan jmp L(e2) 368*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_SINT8) 369*1fd5a2e1SPrashanth Swaminathan movsbl %al, %eax 370*1fd5a2e1SPrashanth Swaminathan jmp L(e2) 371*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_SINT16) 372*1fd5a2e1SPrashanth Swaminathan movswl %ax, %eax 373*1fd5a2e1SPrashanth Swaminathan jmp L(e2) 374*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_UINT8) 375*1fd5a2e1SPrashanth Swaminathan movzbl %al, %eax 376*1fd5a2e1SPrashanth Swaminathan jmp L(e2) 377*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_UINT16) 378*1fd5a2e1SPrashanth Swaminathan movzwl %ax, %eax 379*1fd5a2e1SPrashanth Swaminathan jmp L(e2) 380*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_INT64) 381*1fd5a2e1SPrashanth Swaminathan movl closure_CF+4(%esp), %edx 382*1fd5a2e1SPrashanth Swaminathan jmp L(e2) 383*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_INT32) 384*1fd5a2e1SPrashanth Swaminathan nop 385*1fd5a2e1SPrashanth Swaminathan /* fallthru */ 386*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_VOID) 387*1fd5a2e1SPrashanth SwaminathanL(e2): 388*1fd5a2e1SPrashanth Swaminathan addl $closure_FS, %esp 389*1fd5a2e1SPrashanth SwaminathanL(UW16): 390*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(-closure_FS) 391*1fd5a2e1SPrashanth Swaminathan ret 392*1fd5a2e1SPrashanth SwaminathanL(UW17): 393*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(closure_FS) 394*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_STRUCTPOP) 395*1fd5a2e1SPrashanth Swaminathan addl $closure_FS, %esp 396*1fd5a2e1SPrashanth SwaminathanL(UW18): 397*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(-closure_FS) 398*1fd5a2e1SPrashanth Swaminathan ret $4 399*1fd5a2e1SPrashanth SwaminathanL(UW19): 400*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(closure_FS) 401*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_STRUCTARG) 402*1fd5a2e1SPrashanth Swaminathan jmp L(e2) 403*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_STRUCT_1B) 404*1fd5a2e1SPrashanth Swaminathan movzbl %al, %eax 405*1fd5a2e1SPrashanth Swaminathan jmp L(e2) 406*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_STRUCT_2B) 407*1fd5a2e1SPrashanth Swaminathan movzwl %ax, %eax 408*1fd5a2e1SPrashanth Swaminathan jmp L(e2) 409*1fd5a2e1SPrashanth Swaminathan 410*1fd5a2e1SPrashanth Swaminathan /* Fill out the table so that bad values are predictable. */ 411*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_UNUSED14) 412*1fd5a2e1SPrashanth Swaminathan ud2 413*1fd5a2e1SPrashanth SwaminathanE(L(load_table2), X86_RET_UNUSED15) 414*1fd5a2e1SPrashanth Swaminathan ud2 415*1fd5a2e1SPrashanth Swaminathan 416*1fd5a2e1SPrashanth SwaminathanL(UW20): 417*1fd5a2e1SPrashanth Swaminathan # cfi_endproc 418*1fd5a2e1SPrashanth SwaminathanENDF(C(ffi_closure_i386)) 419*1fd5a2e1SPrashanth Swaminathan 420*1fd5a2e1SPrashanth Swaminathan .balign 16 421*1fd5a2e1SPrashanth Swaminathan .globl C(ffi_go_closure_STDCALL) 422*1fd5a2e1SPrashanth Swaminathan FFI_HIDDEN(C(ffi_go_closure_STDCALL)) 423*1fd5a2e1SPrashanth SwaminathanC(ffi_go_closure_STDCALL): 424*1fd5a2e1SPrashanth SwaminathanL(UW21): 425*1fd5a2e1SPrashanth Swaminathan # cfi_startproc 426*1fd5a2e1SPrashanth Swaminathan subl $closure_FS, %esp 427*1fd5a2e1SPrashanth SwaminathanL(UW22): 428*1fd5a2e1SPrashanth Swaminathan # cfi_def_cfa_offset(closure_FS + 4) 429*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_SAVE_REGS 430*1fd5a2e1SPrashanth Swaminathan movl 4(%ecx), %edx /* copy cif */ 431*1fd5a2e1SPrashanth Swaminathan movl 8(%ecx), %eax /* copy fun */ 432*1fd5a2e1SPrashanth Swaminathan movl %edx, closure_CF+28(%esp) 433*1fd5a2e1SPrashanth Swaminathan movl %eax, closure_CF+32(%esp) 434*1fd5a2e1SPrashanth Swaminathan movl %ecx, closure_CF+36(%esp) /* closure is user_data */ 435*1fd5a2e1SPrashanth Swaminathan jmp L(do_closure_STDCALL) 436*1fd5a2e1SPrashanth SwaminathanL(UW23): 437*1fd5a2e1SPrashanth Swaminathan # cfi_endproc 438*1fd5a2e1SPrashanth SwaminathanENDF(C(ffi_go_closure_STDCALL)) 439*1fd5a2e1SPrashanth Swaminathan 440*1fd5a2e1SPrashanth Swaminathan/* For REGISTER, we have no available parameter registers, and so we 441*1fd5a2e1SPrashanth Swaminathan enter here having pushed the closure onto the stack. */ 442*1fd5a2e1SPrashanth Swaminathan 443*1fd5a2e1SPrashanth Swaminathan .balign 16 444*1fd5a2e1SPrashanth Swaminathan .globl C(ffi_closure_REGISTER) 445*1fd5a2e1SPrashanth Swaminathan FFI_HIDDEN(C(ffi_closure_REGISTER)) 446*1fd5a2e1SPrashanth SwaminathanC(ffi_closure_REGISTER): 447*1fd5a2e1SPrashanth SwaminathanL(UW24): 448*1fd5a2e1SPrashanth Swaminathan # cfi_startproc 449*1fd5a2e1SPrashanth Swaminathan # cfi_def_cfa(%esp, 8) 450*1fd5a2e1SPrashanth Swaminathan # cfi_offset(%eip, -8) 451*1fd5a2e1SPrashanth Swaminathan subl $closure_FS-4, %esp 452*1fd5a2e1SPrashanth SwaminathanL(UW25): 453*1fd5a2e1SPrashanth Swaminathan # cfi_def_cfa_offset(closure_FS + 4) 454*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_SAVE_REGS 455*1fd5a2e1SPrashanth Swaminathan movl closure_FS-4(%esp), %ecx /* load retaddr */ 456*1fd5a2e1SPrashanth Swaminathan movl closure_FS(%esp), %eax /* load closure */ 457*1fd5a2e1SPrashanth Swaminathan movl %ecx, closure_FS(%esp) /* move retaddr */ 458*1fd5a2e1SPrashanth Swaminathan jmp L(do_closure_REGISTER) 459*1fd5a2e1SPrashanth SwaminathanL(UW26): 460*1fd5a2e1SPrashanth Swaminathan # cfi_endproc 461*1fd5a2e1SPrashanth SwaminathanENDF(C(ffi_closure_REGISTER)) 462*1fd5a2e1SPrashanth Swaminathan 463*1fd5a2e1SPrashanth Swaminathan/* For STDCALL (and others), we need to pop N bytes of arguments off 464*1fd5a2e1SPrashanth Swaminathan the stack following the closure. The amount needing to be popped 465*1fd5a2e1SPrashanth Swaminathan is returned to us from ffi_closure_inner. */ 466*1fd5a2e1SPrashanth Swaminathan 467*1fd5a2e1SPrashanth Swaminathan .balign 16 468*1fd5a2e1SPrashanth Swaminathan .globl C(ffi_closure_STDCALL) 469*1fd5a2e1SPrashanth Swaminathan FFI_HIDDEN(C(ffi_closure_STDCALL)) 470*1fd5a2e1SPrashanth SwaminathanC(ffi_closure_STDCALL): 471*1fd5a2e1SPrashanth SwaminathanL(UW27): 472*1fd5a2e1SPrashanth Swaminathan # cfi_startproc 473*1fd5a2e1SPrashanth Swaminathan subl $closure_FS, %esp 474*1fd5a2e1SPrashanth SwaminathanL(UW28): 475*1fd5a2e1SPrashanth Swaminathan # cfi_def_cfa_offset(closure_FS + 4) 476*1fd5a2e1SPrashanth Swaminathan 477*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_SAVE_REGS 478*1fd5a2e1SPrashanth Swaminathan 479*1fd5a2e1SPrashanth Swaminathan /* Entry point from ffi_closure_REGISTER. */ 480*1fd5a2e1SPrashanth SwaminathanL(do_closure_REGISTER): 481*1fd5a2e1SPrashanth Swaminathan 482*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_COPY_TRAMP_DATA 483*1fd5a2e1SPrashanth Swaminathan 484*1fd5a2e1SPrashanth Swaminathan /* Entry point from preceeding Go closure. */ 485*1fd5a2e1SPrashanth SwaminathanL(do_closure_STDCALL): 486*1fd5a2e1SPrashanth Swaminathan 487*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_PREP_CALL 488*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_CALL_INNER(29) 489*1fd5a2e1SPrashanth Swaminathan 490*1fd5a2e1SPrashanth Swaminathan movl %eax, %ecx 491*1fd5a2e1SPrashanth Swaminathan shrl $X86_RET_POP_SHIFT, %ecx /* isolate pop count */ 492*1fd5a2e1SPrashanth Swaminathan leal closure_FS(%esp, %ecx), %ecx /* compute popped esp */ 493*1fd5a2e1SPrashanth Swaminathan movl closure_FS(%esp), %edx /* move return address */ 494*1fd5a2e1SPrashanth Swaminathan movl %edx, (%ecx) 495*1fd5a2e1SPrashanth Swaminathan 496*1fd5a2e1SPrashanth Swaminathan /* From this point on, the value of %esp upon return is %ecx+4, 497*1fd5a2e1SPrashanth Swaminathan and we've copied the return address to %ecx to make return easy. 498*1fd5a2e1SPrashanth Swaminathan There's no point in representing this in the unwind info, as 499*1fd5a2e1SPrashanth Swaminathan there is always a window between the mov and the ret which 500*1fd5a2e1SPrashanth Swaminathan will be wrong from one point of view or another. */ 501*1fd5a2e1SPrashanth Swaminathan 502*1fd5a2e1SPrashanth Swaminathan FFI_CLOSURE_MASK_AND_JUMP(3, 30) 503*1fd5a2e1SPrashanth Swaminathan 504*1fd5a2e1SPrashanth Swaminathan .balign 8 505*1fd5a2e1SPrashanth SwaminathanL(load_table3): 506*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_FLOAT) 507*1fd5a2e1SPrashanth Swaminathan flds closure_CF(%esp) 508*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 509*1fd5a2e1SPrashanth Swaminathan ret 510*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_DOUBLE) 511*1fd5a2e1SPrashanth Swaminathan fldl closure_CF(%esp) 512*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 513*1fd5a2e1SPrashanth Swaminathan ret 514*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_LDOUBLE) 515*1fd5a2e1SPrashanth Swaminathan fldt closure_CF(%esp) 516*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 517*1fd5a2e1SPrashanth Swaminathan ret 518*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_SINT8) 519*1fd5a2e1SPrashanth Swaminathan movsbl %al, %eax 520*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 521*1fd5a2e1SPrashanth Swaminathan ret 522*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_SINT16) 523*1fd5a2e1SPrashanth Swaminathan movswl %ax, %eax 524*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 525*1fd5a2e1SPrashanth Swaminathan ret 526*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_UINT8) 527*1fd5a2e1SPrashanth Swaminathan movzbl %al, %eax 528*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 529*1fd5a2e1SPrashanth Swaminathan ret 530*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_UINT16) 531*1fd5a2e1SPrashanth Swaminathan movzwl %ax, %eax 532*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 533*1fd5a2e1SPrashanth Swaminathan ret 534*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_INT64) 535*1fd5a2e1SPrashanth Swaminathan movl closure_CF+4(%esp), %edx 536*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 537*1fd5a2e1SPrashanth Swaminathan ret 538*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_INT32) 539*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 540*1fd5a2e1SPrashanth Swaminathan ret 541*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_VOID) 542*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 543*1fd5a2e1SPrashanth Swaminathan ret 544*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_STRUCTPOP) 545*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 546*1fd5a2e1SPrashanth Swaminathan ret 547*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_STRUCTARG) 548*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 549*1fd5a2e1SPrashanth Swaminathan ret 550*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_STRUCT_1B) 551*1fd5a2e1SPrashanth Swaminathan movzbl %al, %eax 552*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 553*1fd5a2e1SPrashanth Swaminathan ret 554*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_STRUCT_2B) 555*1fd5a2e1SPrashanth Swaminathan movzwl %ax, %eax 556*1fd5a2e1SPrashanth Swaminathan movl %ecx, %esp 557*1fd5a2e1SPrashanth Swaminathan ret 558*1fd5a2e1SPrashanth Swaminathan 559*1fd5a2e1SPrashanth Swaminathan /* Fill out the table so that bad values are predictable. */ 560*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_UNUSED14) 561*1fd5a2e1SPrashanth Swaminathan ud2 562*1fd5a2e1SPrashanth SwaminathanE(L(load_table3), X86_RET_UNUSED15) 563*1fd5a2e1SPrashanth Swaminathan ud2 564*1fd5a2e1SPrashanth Swaminathan 565*1fd5a2e1SPrashanth SwaminathanL(UW31): 566*1fd5a2e1SPrashanth Swaminathan # cfi_endproc 567*1fd5a2e1SPrashanth SwaminathanENDF(C(ffi_closure_STDCALL)) 568*1fd5a2e1SPrashanth Swaminathan 569*1fd5a2e1SPrashanth Swaminathan#if !FFI_NO_RAW_API 570*1fd5a2e1SPrashanth Swaminathan 571*1fd5a2e1SPrashanth Swaminathan#define raw_closure_S_FS (16+16+12) 572*1fd5a2e1SPrashanth Swaminathan 573*1fd5a2e1SPrashanth Swaminathan .balign 16 574*1fd5a2e1SPrashanth Swaminathan .globl C(ffi_closure_raw_SYSV) 575*1fd5a2e1SPrashanth Swaminathan FFI_HIDDEN(C(ffi_closure_raw_SYSV)) 576*1fd5a2e1SPrashanth SwaminathanC(ffi_closure_raw_SYSV): 577*1fd5a2e1SPrashanth SwaminathanL(UW32): 578*1fd5a2e1SPrashanth Swaminathan # cfi_startproc 579*1fd5a2e1SPrashanth Swaminathan subl $raw_closure_S_FS, %esp 580*1fd5a2e1SPrashanth SwaminathanL(UW33): 581*1fd5a2e1SPrashanth Swaminathan # cfi_def_cfa_offset(raw_closure_S_FS + 4) 582*1fd5a2e1SPrashanth Swaminathan movl %ebx, raw_closure_S_FS-4(%esp) 583*1fd5a2e1SPrashanth SwaminathanL(UW34): 584*1fd5a2e1SPrashanth Swaminathan # cfi_rel_offset(%ebx, raw_closure_S_FS-4) 585*1fd5a2e1SPrashanth Swaminathan 586*1fd5a2e1SPrashanth Swaminathan movl FFI_TRAMPOLINE_SIZE+8(%eax), %edx /* load cl->user_data */ 587*1fd5a2e1SPrashanth Swaminathan movl %edx, 12(%esp) 588*1fd5a2e1SPrashanth Swaminathan leal raw_closure_S_FS+4(%esp), %edx /* load raw_args */ 589*1fd5a2e1SPrashanth Swaminathan movl %edx, 8(%esp) 590*1fd5a2e1SPrashanth Swaminathan leal 16(%esp), %edx /* load &res */ 591*1fd5a2e1SPrashanth Swaminathan movl %edx, 4(%esp) 592*1fd5a2e1SPrashanth Swaminathan movl FFI_TRAMPOLINE_SIZE(%eax), %ebx /* load cl->cif */ 593*1fd5a2e1SPrashanth Swaminathan movl %ebx, (%esp) 594*1fd5a2e1SPrashanth Swaminathan call *FFI_TRAMPOLINE_SIZE+4(%eax) /* call cl->fun */ 595*1fd5a2e1SPrashanth Swaminathan 596*1fd5a2e1SPrashanth Swaminathan movl 20(%ebx), %eax /* load cif->flags */ 597*1fd5a2e1SPrashanth Swaminathan andl $X86_RET_TYPE_MASK, %eax 598*1fd5a2e1SPrashanth Swaminathan#ifdef __PIC__ 599*1fd5a2e1SPrashanth Swaminathan call C(__x86.get_pc_thunk.bx) 600*1fd5a2e1SPrashanth SwaminathanL(pc4): 601*1fd5a2e1SPrashanth Swaminathan leal L(load_table4)-L(pc4)(%ebx, %eax, 8), %ecx 602*1fd5a2e1SPrashanth Swaminathan#else 603*1fd5a2e1SPrashanth Swaminathan leal L(load_table4)(,%eax, 8), %ecx 604*1fd5a2e1SPrashanth Swaminathan#endif 605*1fd5a2e1SPrashanth Swaminathan movl raw_closure_S_FS-4(%esp), %ebx 606*1fd5a2e1SPrashanth SwaminathanL(UW35): 607*1fd5a2e1SPrashanth Swaminathan # cfi_restore(%ebx) 608*1fd5a2e1SPrashanth Swaminathan movl 16(%esp), %eax /* Optimistic load */ 609*1fd5a2e1SPrashanth Swaminathan jmp *%ecx 610*1fd5a2e1SPrashanth Swaminathan 611*1fd5a2e1SPrashanth Swaminathan .balign 8 612*1fd5a2e1SPrashanth SwaminathanL(load_table4): 613*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_FLOAT) 614*1fd5a2e1SPrashanth Swaminathan flds 16(%esp) 615*1fd5a2e1SPrashanth Swaminathan jmp L(e4) 616*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_DOUBLE) 617*1fd5a2e1SPrashanth Swaminathan fldl 16(%esp) 618*1fd5a2e1SPrashanth Swaminathan jmp L(e4) 619*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_LDOUBLE) 620*1fd5a2e1SPrashanth Swaminathan fldt 16(%esp) 621*1fd5a2e1SPrashanth Swaminathan jmp L(e4) 622*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_SINT8) 623*1fd5a2e1SPrashanth Swaminathan movsbl %al, %eax 624*1fd5a2e1SPrashanth Swaminathan jmp L(e4) 625*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_SINT16) 626*1fd5a2e1SPrashanth Swaminathan movswl %ax, %eax 627*1fd5a2e1SPrashanth Swaminathan jmp L(e4) 628*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_UINT8) 629*1fd5a2e1SPrashanth Swaminathan movzbl %al, %eax 630*1fd5a2e1SPrashanth Swaminathan jmp L(e4) 631*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_UINT16) 632*1fd5a2e1SPrashanth Swaminathan movzwl %ax, %eax 633*1fd5a2e1SPrashanth Swaminathan jmp L(e4) 634*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_INT64) 635*1fd5a2e1SPrashanth Swaminathan movl 16+4(%esp), %edx 636*1fd5a2e1SPrashanth Swaminathan jmp L(e4) 637*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_INT32) 638*1fd5a2e1SPrashanth Swaminathan nop 639*1fd5a2e1SPrashanth Swaminathan /* fallthru */ 640*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_VOID) 641*1fd5a2e1SPrashanth SwaminathanL(e4): 642*1fd5a2e1SPrashanth Swaminathan addl $raw_closure_S_FS, %esp 643*1fd5a2e1SPrashanth SwaminathanL(UW36): 644*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(-raw_closure_S_FS) 645*1fd5a2e1SPrashanth Swaminathan ret 646*1fd5a2e1SPrashanth SwaminathanL(UW37): 647*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(raw_closure_S_FS) 648*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_STRUCTPOP) 649*1fd5a2e1SPrashanth Swaminathan addl $raw_closure_S_FS, %esp 650*1fd5a2e1SPrashanth SwaminathanL(UW38): 651*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(-raw_closure_S_FS) 652*1fd5a2e1SPrashanth Swaminathan ret $4 653*1fd5a2e1SPrashanth SwaminathanL(UW39): 654*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(raw_closure_S_FS) 655*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_STRUCTARG) 656*1fd5a2e1SPrashanth Swaminathan jmp L(e4) 657*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_STRUCT_1B) 658*1fd5a2e1SPrashanth Swaminathan movzbl %al, %eax 659*1fd5a2e1SPrashanth Swaminathan jmp L(e4) 660*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_STRUCT_2B) 661*1fd5a2e1SPrashanth Swaminathan movzwl %ax, %eax 662*1fd5a2e1SPrashanth Swaminathan jmp L(e4) 663*1fd5a2e1SPrashanth Swaminathan 664*1fd5a2e1SPrashanth Swaminathan /* Fill out the table so that bad values are predictable. */ 665*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_UNUSED14) 666*1fd5a2e1SPrashanth Swaminathan ud2 667*1fd5a2e1SPrashanth SwaminathanE(L(load_table4), X86_RET_UNUSED15) 668*1fd5a2e1SPrashanth Swaminathan ud2 669*1fd5a2e1SPrashanth Swaminathan 670*1fd5a2e1SPrashanth SwaminathanL(UW40): 671*1fd5a2e1SPrashanth Swaminathan # cfi_endproc 672*1fd5a2e1SPrashanth SwaminathanENDF(C(ffi_closure_raw_SYSV)) 673*1fd5a2e1SPrashanth Swaminathan 674*1fd5a2e1SPrashanth Swaminathan#define raw_closure_T_FS (16+16+8) 675*1fd5a2e1SPrashanth Swaminathan 676*1fd5a2e1SPrashanth Swaminathan .balign 16 677*1fd5a2e1SPrashanth Swaminathan .globl C(ffi_closure_raw_THISCALL) 678*1fd5a2e1SPrashanth Swaminathan FFI_HIDDEN(C(ffi_closure_raw_THISCALL)) 679*1fd5a2e1SPrashanth SwaminathanC(ffi_closure_raw_THISCALL): 680*1fd5a2e1SPrashanth SwaminathanL(UW41): 681*1fd5a2e1SPrashanth Swaminathan # cfi_startproc 682*1fd5a2e1SPrashanth Swaminathan /* Rearrange the stack such that %ecx is the first argument. 683*1fd5a2e1SPrashanth Swaminathan This means moving the return address. */ 684*1fd5a2e1SPrashanth Swaminathan popl %edx 685*1fd5a2e1SPrashanth SwaminathanL(UW42): 686*1fd5a2e1SPrashanth Swaminathan # cfi_def_cfa_offset(0) 687*1fd5a2e1SPrashanth Swaminathan # cfi_register(%eip, %edx) 688*1fd5a2e1SPrashanth Swaminathan pushl %ecx 689*1fd5a2e1SPrashanth SwaminathanL(UW43): 690*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(4) 691*1fd5a2e1SPrashanth Swaminathan pushl %edx 692*1fd5a2e1SPrashanth SwaminathanL(UW44): 693*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(4) 694*1fd5a2e1SPrashanth Swaminathan # cfi_rel_offset(%eip, 0) 695*1fd5a2e1SPrashanth Swaminathan subl $raw_closure_T_FS, %esp 696*1fd5a2e1SPrashanth SwaminathanL(UW45): 697*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(raw_closure_T_FS) 698*1fd5a2e1SPrashanth Swaminathan movl %ebx, raw_closure_T_FS-4(%esp) 699*1fd5a2e1SPrashanth SwaminathanL(UW46): 700*1fd5a2e1SPrashanth Swaminathan # cfi_rel_offset(%ebx, raw_closure_T_FS-4) 701*1fd5a2e1SPrashanth Swaminathan 702*1fd5a2e1SPrashanth Swaminathan movl FFI_TRAMPOLINE_SIZE+8(%eax), %edx /* load cl->user_data */ 703*1fd5a2e1SPrashanth Swaminathan movl %edx, 12(%esp) 704*1fd5a2e1SPrashanth Swaminathan leal raw_closure_T_FS+4(%esp), %edx /* load raw_args */ 705*1fd5a2e1SPrashanth Swaminathan movl %edx, 8(%esp) 706*1fd5a2e1SPrashanth Swaminathan leal 16(%esp), %edx /* load &res */ 707*1fd5a2e1SPrashanth Swaminathan movl %edx, 4(%esp) 708*1fd5a2e1SPrashanth Swaminathan movl FFI_TRAMPOLINE_SIZE(%eax), %ebx /* load cl->cif */ 709*1fd5a2e1SPrashanth Swaminathan movl %ebx, (%esp) 710*1fd5a2e1SPrashanth Swaminathan call *FFI_TRAMPOLINE_SIZE+4(%eax) /* call cl->fun */ 711*1fd5a2e1SPrashanth Swaminathan 712*1fd5a2e1SPrashanth Swaminathan movl 20(%ebx), %eax /* load cif->flags */ 713*1fd5a2e1SPrashanth Swaminathan andl $X86_RET_TYPE_MASK, %eax 714*1fd5a2e1SPrashanth Swaminathan#ifdef __PIC__ 715*1fd5a2e1SPrashanth Swaminathan call C(__x86.get_pc_thunk.bx) 716*1fd5a2e1SPrashanth SwaminathanL(pc5): 717*1fd5a2e1SPrashanth Swaminathan leal L(load_table5)-L(pc5)(%ebx, %eax, 8), %ecx 718*1fd5a2e1SPrashanth Swaminathan#else 719*1fd5a2e1SPrashanth Swaminathan leal L(load_table5)(,%eax, 8), %ecx 720*1fd5a2e1SPrashanth Swaminathan#endif 721*1fd5a2e1SPrashanth Swaminathan movl raw_closure_T_FS-4(%esp), %ebx 722*1fd5a2e1SPrashanth SwaminathanL(UW47): 723*1fd5a2e1SPrashanth Swaminathan # cfi_restore(%ebx) 724*1fd5a2e1SPrashanth Swaminathan movl 16(%esp), %eax /* Optimistic load */ 725*1fd5a2e1SPrashanth Swaminathan jmp *%ecx 726*1fd5a2e1SPrashanth Swaminathan 727*1fd5a2e1SPrashanth Swaminathan .balign 8 728*1fd5a2e1SPrashanth SwaminathanL(load_table5): 729*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_FLOAT) 730*1fd5a2e1SPrashanth Swaminathan flds 16(%esp) 731*1fd5a2e1SPrashanth Swaminathan jmp L(e5) 732*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_DOUBLE) 733*1fd5a2e1SPrashanth Swaminathan fldl 16(%esp) 734*1fd5a2e1SPrashanth Swaminathan jmp L(e5) 735*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_LDOUBLE) 736*1fd5a2e1SPrashanth Swaminathan fldt 16(%esp) 737*1fd5a2e1SPrashanth Swaminathan jmp L(e5) 738*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_SINT8) 739*1fd5a2e1SPrashanth Swaminathan movsbl %al, %eax 740*1fd5a2e1SPrashanth Swaminathan jmp L(e5) 741*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_SINT16) 742*1fd5a2e1SPrashanth Swaminathan movswl %ax, %eax 743*1fd5a2e1SPrashanth Swaminathan jmp L(e5) 744*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_UINT8) 745*1fd5a2e1SPrashanth Swaminathan movzbl %al, %eax 746*1fd5a2e1SPrashanth Swaminathan jmp L(e5) 747*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_UINT16) 748*1fd5a2e1SPrashanth Swaminathan movzwl %ax, %eax 749*1fd5a2e1SPrashanth Swaminathan jmp L(e5) 750*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_INT64) 751*1fd5a2e1SPrashanth Swaminathan movl 16+4(%esp), %edx 752*1fd5a2e1SPrashanth Swaminathan jmp L(e5) 753*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_INT32) 754*1fd5a2e1SPrashanth Swaminathan nop 755*1fd5a2e1SPrashanth Swaminathan /* fallthru */ 756*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_VOID) 757*1fd5a2e1SPrashanth SwaminathanL(e5): 758*1fd5a2e1SPrashanth Swaminathan addl $raw_closure_T_FS, %esp 759*1fd5a2e1SPrashanth SwaminathanL(UW48): 760*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(-raw_closure_T_FS) 761*1fd5a2e1SPrashanth Swaminathan /* Remove the extra %ecx argument we pushed. */ 762*1fd5a2e1SPrashanth Swaminathan ret $4 763*1fd5a2e1SPrashanth SwaminathanL(UW49): 764*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(raw_closure_T_FS) 765*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_STRUCTPOP) 766*1fd5a2e1SPrashanth Swaminathan addl $raw_closure_T_FS, %esp 767*1fd5a2e1SPrashanth SwaminathanL(UW50): 768*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(-raw_closure_T_FS) 769*1fd5a2e1SPrashanth Swaminathan ret $8 770*1fd5a2e1SPrashanth SwaminathanL(UW51): 771*1fd5a2e1SPrashanth Swaminathan # cfi_adjust_cfa_offset(raw_closure_T_FS) 772*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_STRUCTARG) 773*1fd5a2e1SPrashanth Swaminathan jmp L(e5) 774*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_STRUCT_1B) 775*1fd5a2e1SPrashanth Swaminathan movzbl %al, %eax 776*1fd5a2e1SPrashanth Swaminathan jmp L(e5) 777*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_STRUCT_2B) 778*1fd5a2e1SPrashanth Swaminathan movzwl %ax, %eax 779*1fd5a2e1SPrashanth Swaminathan jmp L(e5) 780*1fd5a2e1SPrashanth Swaminathan 781*1fd5a2e1SPrashanth Swaminathan /* Fill out the table so that bad values are predictable. */ 782*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_UNUSED14) 783*1fd5a2e1SPrashanth Swaminathan ud2 784*1fd5a2e1SPrashanth SwaminathanE(L(load_table5), X86_RET_UNUSED15) 785*1fd5a2e1SPrashanth Swaminathan ud2 786*1fd5a2e1SPrashanth Swaminathan 787*1fd5a2e1SPrashanth SwaminathanL(UW52): 788*1fd5a2e1SPrashanth Swaminathan # cfi_endproc 789*1fd5a2e1SPrashanth SwaminathanENDF(C(ffi_closure_raw_THISCALL)) 790*1fd5a2e1SPrashanth Swaminathan 791*1fd5a2e1SPrashanth Swaminathan#endif /* !FFI_NO_RAW_API */ 792*1fd5a2e1SPrashanth Swaminathan 793*1fd5a2e1SPrashanth Swaminathan#ifdef X86_DARWIN 794*1fd5a2e1SPrashanth Swaminathan# define COMDAT(X) \ 795*1fd5a2e1SPrashanth Swaminathan .section __TEXT,__text,coalesced,pure_instructions; \ 796*1fd5a2e1SPrashanth Swaminathan .weak_definition X; \ 797*1fd5a2e1SPrashanth Swaminathan FFI_HIDDEN(X) 798*1fd5a2e1SPrashanth Swaminathan#elif defined __ELF__ && !(defined(__sun__) && defined(__svr4__)) 799*1fd5a2e1SPrashanth Swaminathan# define COMDAT(X) \ 800*1fd5a2e1SPrashanth Swaminathan .section .text.X,"axG",@progbits,X,comdat; \ 801*1fd5a2e1SPrashanth Swaminathan .globl X; \ 802*1fd5a2e1SPrashanth Swaminathan FFI_HIDDEN(X) 803*1fd5a2e1SPrashanth Swaminathan#else 804*1fd5a2e1SPrashanth Swaminathan# define COMDAT(X) 805*1fd5a2e1SPrashanth Swaminathan#endif 806*1fd5a2e1SPrashanth Swaminathan 807*1fd5a2e1SPrashanth Swaminathan#if defined(__PIC__) 808*1fd5a2e1SPrashanth Swaminathan COMDAT(C(__x86.get_pc_thunk.bx)) 809*1fd5a2e1SPrashanth SwaminathanC(__x86.get_pc_thunk.bx): 810*1fd5a2e1SPrashanth Swaminathan movl (%esp), %ebx 811*1fd5a2e1SPrashanth Swaminathan ret 812*1fd5a2e1SPrashanth SwaminathanENDF(C(__x86.get_pc_thunk.bx)) 813*1fd5a2e1SPrashanth Swaminathan# if defined X86_DARWIN || defined HAVE_HIDDEN_VISIBILITY_ATTRIBUTE 814*1fd5a2e1SPrashanth Swaminathan COMDAT(C(__x86.get_pc_thunk.dx)) 815*1fd5a2e1SPrashanth SwaminathanC(__x86.get_pc_thunk.dx): 816*1fd5a2e1SPrashanth Swaminathan movl (%esp), %edx 817*1fd5a2e1SPrashanth Swaminathan ret 818*1fd5a2e1SPrashanth SwaminathanENDF(C(__x86.get_pc_thunk.dx)) 819*1fd5a2e1SPrashanth Swaminathan#endif /* DARWIN || HIDDEN */ 820*1fd5a2e1SPrashanth Swaminathan#endif /* __PIC__ */ 821*1fd5a2e1SPrashanth Swaminathan 822*1fd5a2e1SPrashanth Swaminathan/* Sadly, OSX cctools-as doesn't understand .cfi directives at all. */ 823*1fd5a2e1SPrashanth Swaminathan 824*1fd5a2e1SPrashanth Swaminathan#ifdef __APPLE__ 825*1fd5a2e1SPrashanth Swaminathan.section __TEXT,__eh_frame,coalesced,no_toc+strip_static_syms+live_support 826*1fd5a2e1SPrashanth SwaminathanEHFrame0: 827*1fd5a2e1SPrashanth Swaminathan#elif defined(X86_WIN32) 828*1fd5a2e1SPrashanth Swaminathan.section .eh_frame,"r" 829*1fd5a2e1SPrashanth Swaminathan#elif defined(HAVE_AS_X86_64_UNWIND_SECTION_TYPE) 830*1fd5a2e1SPrashanth Swaminathan.section .eh_frame,EH_FRAME_FLAGS,@unwind 831*1fd5a2e1SPrashanth Swaminathan#else 832*1fd5a2e1SPrashanth Swaminathan.section .eh_frame,EH_FRAME_FLAGS,@progbits 833*1fd5a2e1SPrashanth Swaminathan#endif 834*1fd5a2e1SPrashanth Swaminathan 835*1fd5a2e1SPrashanth Swaminathan#ifdef HAVE_AS_X86_PCREL 836*1fd5a2e1SPrashanth Swaminathan# define PCREL(X) X - . 837*1fd5a2e1SPrashanth Swaminathan#else 838*1fd5a2e1SPrashanth Swaminathan# define PCREL(X) X@rel 839*1fd5a2e1SPrashanth Swaminathan#endif 840*1fd5a2e1SPrashanth Swaminathan 841*1fd5a2e1SPrashanth Swaminathan/* Simplify advancing between labels. Assume DW_CFA_advance_loc1 fits. */ 842*1fd5a2e1SPrashanth Swaminathan#define ADV(N, P) .byte 2, L(N)-L(P) 843*1fd5a2e1SPrashanth Swaminathan 844*1fd5a2e1SPrashanth Swaminathan .balign 4 845*1fd5a2e1SPrashanth SwaminathanL(CIE): 846*1fd5a2e1SPrashanth Swaminathan .set L(set0),L(ECIE)-L(SCIE) 847*1fd5a2e1SPrashanth Swaminathan .long L(set0) /* CIE Length */ 848*1fd5a2e1SPrashanth SwaminathanL(SCIE): 849*1fd5a2e1SPrashanth Swaminathan .long 0 /* CIE Identifier Tag */ 850*1fd5a2e1SPrashanth Swaminathan .byte 1 /* CIE Version */ 851*1fd5a2e1SPrashanth Swaminathan .ascii "zR\0" /* CIE Augmentation */ 852*1fd5a2e1SPrashanth Swaminathan .byte 1 /* CIE Code Alignment Factor */ 853*1fd5a2e1SPrashanth Swaminathan .byte 0x7c /* CIE Data Alignment Factor */ 854*1fd5a2e1SPrashanth Swaminathan .byte 0x8 /* CIE RA Column */ 855*1fd5a2e1SPrashanth Swaminathan .byte 1 /* Augmentation size */ 856*1fd5a2e1SPrashanth Swaminathan .byte 0x1b /* FDE Encoding (pcrel sdata4) */ 857*1fd5a2e1SPrashanth Swaminathan .byte 0xc, 4, 4 /* DW_CFA_def_cfa, %esp offset 4 */ 858*1fd5a2e1SPrashanth Swaminathan .byte 0x80+8, 1 /* DW_CFA_offset, %eip offset 1*-4 */ 859*1fd5a2e1SPrashanth Swaminathan .balign 4 860*1fd5a2e1SPrashanth SwaminathanL(ECIE): 861*1fd5a2e1SPrashanth Swaminathan 862*1fd5a2e1SPrashanth Swaminathan .set L(set1),L(EFDE1)-L(SFDE1) 863*1fd5a2e1SPrashanth Swaminathan .long L(set1) /* FDE Length */ 864*1fd5a2e1SPrashanth SwaminathanL(SFDE1): 865*1fd5a2e1SPrashanth Swaminathan .long L(SFDE1)-L(CIE) /* FDE CIE offset */ 866*1fd5a2e1SPrashanth Swaminathan .long PCREL(L(UW0)) /* Initial location */ 867*1fd5a2e1SPrashanth Swaminathan .long L(UW5)-L(UW0) /* Address range */ 868*1fd5a2e1SPrashanth Swaminathan .byte 0 /* Augmentation size */ 869*1fd5a2e1SPrashanth Swaminathan ADV(UW1, UW0) 870*1fd5a2e1SPrashanth Swaminathan .byte 0xc, 5, 8 /* DW_CFA_def_cfa, %ebp 8 */ 871*1fd5a2e1SPrashanth Swaminathan .byte 0x80+5, 2 /* DW_CFA_offset, %ebp 2*-4 */ 872*1fd5a2e1SPrashanth Swaminathan ADV(UW2, UW1) 873*1fd5a2e1SPrashanth Swaminathan .byte 0x80+3, 0 /* DW_CFA_offset, %ebx 0*-4 */ 874*1fd5a2e1SPrashanth Swaminathan ADV(UW3, UW2) 875*1fd5a2e1SPrashanth Swaminathan .byte 0xa /* DW_CFA_remember_state */ 876*1fd5a2e1SPrashanth Swaminathan .byte 0xc, 4, 4 /* DW_CFA_def_cfa, %esp 4 */ 877*1fd5a2e1SPrashanth Swaminathan .byte 0xc0+3 /* DW_CFA_restore, %ebx */ 878*1fd5a2e1SPrashanth Swaminathan .byte 0xc0+5 /* DW_CFA_restore, %ebp */ 879*1fd5a2e1SPrashanth Swaminathan ADV(UW4, UW3) 880*1fd5a2e1SPrashanth Swaminathan .byte 0xb /* DW_CFA_restore_state */ 881*1fd5a2e1SPrashanth Swaminathan .balign 4 882*1fd5a2e1SPrashanth SwaminathanL(EFDE1): 883*1fd5a2e1SPrashanth Swaminathan 884*1fd5a2e1SPrashanth Swaminathan .set L(set2),L(EFDE2)-L(SFDE2) 885*1fd5a2e1SPrashanth Swaminathan .long L(set2) /* FDE Length */ 886*1fd5a2e1SPrashanth SwaminathanL(SFDE2): 887*1fd5a2e1SPrashanth Swaminathan .long L(SFDE2)-L(CIE) /* FDE CIE offset */ 888*1fd5a2e1SPrashanth Swaminathan .long PCREL(L(UW6)) /* Initial location */ 889*1fd5a2e1SPrashanth Swaminathan .long L(UW8)-L(UW6) /* Address range */ 890*1fd5a2e1SPrashanth Swaminathan .byte 0 /* Augmentation size */ 891*1fd5a2e1SPrashanth Swaminathan ADV(UW7, UW6) 892*1fd5a2e1SPrashanth Swaminathan .byte 0xe, closure_FS+4 /* DW_CFA_def_cfa_offset */ 893*1fd5a2e1SPrashanth Swaminathan .balign 4 894*1fd5a2e1SPrashanth SwaminathanL(EFDE2): 895*1fd5a2e1SPrashanth Swaminathan 896*1fd5a2e1SPrashanth Swaminathan .set L(set3),L(EFDE3)-L(SFDE3) 897*1fd5a2e1SPrashanth Swaminathan .long L(set3) /* FDE Length */ 898*1fd5a2e1SPrashanth SwaminathanL(SFDE3): 899*1fd5a2e1SPrashanth Swaminathan .long L(SFDE3)-L(CIE) /* FDE CIE offset */ 900*1fd5a2e1SPrashanth Swaminathan .long PCREL(L(UW9)) /* Initial location */ 901*1fd5a2e1SPrashanth Swaminathan .long L(UW11)-L(UW9) /* Address range */ 902*1fd5a2e1SPrashanth Swaminathan .byte 0 /* Augmentation size */ 903*1fd5a2e1SPrashanth Swaminathan ADV(UW10, UW9) 904*1fd5a2e1SPrashanth Swaminathan .byte 0xe, closure_FS+4 /* DW_CFA_def_cfa_offset */ 905*1fd5a2e1SPrashanth Swaminathan .balign 4 906*1fd5a2e1SPrashanth SwaminathanL(EFDE3): 907*1fd5a2e1SPrashanth Swaminathan 908*1fd5a2e1SPrashanth Swaminathan .set L(set4),L(EFDE4)-L(SFDE4) 909*1fd5a2e1SPrashanth Swaminathan .long L(set4) /* FDE Length */ 910*1fd5a2e1SPrashanth SwaminathanL(SFDE4): 911*1fd5a2e1SPrashanth Swaminathan .long L(SFDE4)-L(CIE) /* FDE CIE offset */ 912*1fd5a2e1SPrashanth Swaminathan .long PCREL(L(UW12)) /* Initial location */ 913*1fd5a2e1SPrashanth Swaminathan .long L(UW20)-L(UW12) /* Address range */ 914*1fd5a2e1SPrashanth Swaminathan .byte 0 /* Augmentation size */ 915*1fd5a2e1SPrashanth Swaminathan ADV(UW13, UW12) 916*1fd5a2e1SPrashanth Swaminathan .byte 0xe, closure_FS+4 /* DW_CFA_def_cfa_offset */ 917*1fd5a2e1SPrashanth Swaminathan#ifdef FFI_CLOSURE_CALL_INNER_SAVE_EBX 918*1fd5a2e1SPrashanth Swaminathan ADV(UW14, UW13) 919*1fd5a2e1SPrashanth Swaminathan .byte 0x80+3, (40-(closure_FS+4))/-4 /* DW_CFA_offset %ebx */ 920*1fd5a2e1SPrashanth Swaminathan ADV(UW15, UW14) 921*1fd5a2e1SPrashanth Swaminathan .byte 0xc0+3 /* DW_CFA_restore %ebx */ 922*1fd5a2e1SPrashanth Swaminathan ADV(UW16, UW15) 923*1fd5a2e1SPrashanth Swaminathan#else 924*1fd5a2e1SPrashanth Swaminathan ADV(UW16, UW13) 925*1fd5a2e1SPrashanth Swaminathan#endif 926*1fd5a2e1SPrashanth Swaminathan .byte 0xe, 4 /* DW_CFA_def_cfa_offset */ 927*1fd5a2e1SPrashanth Swaminathan ADV(UW17, UW16) 928*1fd5a2e1SPrashanth Swaminathan .byte 0xe, closure_FS+4 /* DW_CFA_def_cfa_offset */ 929*1fd5a2e1SPrashanth Swaminathan ADV(UW18, UW17) 930*1fd5a2e1SPrashanth Swaminathan .byte 0xe, 4 /* DW_CFA_def_cfa_offset */ 931*1fd5a2e1SPrashanth Swaminathan ADV(UW19, UW18) 932*1fd5a2e1SPrashanth Swaminathan .byte 0xe, closure_FS+4 /* DW_CFA_def_cfa_offset */ 933*1fd5a2e1SPrashanth Swaminathan .balign 4 934*1fd5a2e1SPrashanth SwaminathanL(EFDE4): 935*1fd5a2e1SPrashanth Swaminathan 936*1fd5a2e1SPrashanth Swaminathan .set L(set5),L(EFDE5)-L(SFDE5) 937*1fd5a2e1SPrashanth Swaminathan .long L(set5) /* FDE Length */ 938*1fd5a2e1SPrashanth SwaminathanL(SFDE5): 939*1fd5a2e1SPrashanth Swaminathan .long L(SFDE5)-L(CIE) /* FDE CIE offset */ 940*1fd5a2e1SPrashanth Swaminathan .long PCREL(L(UW21)) /* Initial location */ 941*1fd5a2e1SPrashanth Swaminathan .long L(UW23)-L(UW21) /* Address range */ 942*1fd5a2e1SPrashanth Swaminathan .byte 0 /* Augmentation size */ 943*1fd5a2e1SPrashanth Swaminathan ADV(UW22, UW21) 944*1fd5a2e1SPrashanth Swaminathan .byte 0xe, closure_FS+4 /* DW_CFA_def_cfa_offset */ 945*1fd5a2e1SPrashanth Swaminathan .balign 4 946*1fd5a2e1SPrashanth SwaminathanL(EFDE5): 947*1fd5a2e1SPrashanth Swaminathan 948*1fd5a2e1SPrashanth Swaminathan .set L(set6),L(EFDE6)-L(SFDE6) 949*1fd5a2e1SPrashanth Swaminathan .long L(set6) /* FDE Length */ 950*1fd5a2e1SPrashanth SwaminathanL(SFDE6): 951*1fd5a2e1SPrashanth Swaminathan .long L(SFDE6)-L(CIE) /* FDE CIE offset */ 952*1fd5a2e1SPrashanth Swaminathan .long PCREL(L(UW24)) /* Initial location */ 953*1fd5a2e1SPrashanth Swaminathan .long L(UW26)-L(UW24) /* Address range */ 954*1fd5a2e1SPrashanth Swaminathan .byte 0 /* Augmentation size */ 955*1fd5a2e1SPrashanth Swaminathan .byte 0xe, 8 /* DW_CFA_def_cfa_offset */ 956*1fd5a2e1SPrashanth Swaminathan .byte 0x80+8, 2 /* DW_CFA_offset %eip, 2*-4 */ 957*1fd5a2e1SPrashanth Swaminathan ADV(UW25, UW24) 958*1fd5a2e1SPrashanth Swaminathan .byte 0xe, closure_FS+4 /* DW_CFA_def_cfa_offset */ 959*1fd5a2e1SPrashanth Swaminathan .balign 4 960*1fd5a2e1SPrashanth SwaminathanL(EFDE6): 961*1fd5a2e1SPrashanth Swaminathan 962*1fd5a2e1SPrashanth Swaminathan .set L(set7),L(EFDE7)-L(SFDE7) 963*1fd5a2e1SPrashanth Swaminathan .long L(set7) /* FDE Length */ 964*1fd5a2e1SPrashanth SwaminathanL(SFDE7): 965*1fd5a2e1SPrashanth Swaminathan .long L(SFDE7)-L(CIE) /* FDE CIE offset */ 966*1fd5a2e1SPrashanth Swaminathan .long PCREL(L(UW27)) /* Initial location */ 967*1fd5a2e1SPrashanth Swaminathan .long L(UW31)-L(UW27) /* Address range */ 968*1fd5a2e1SPrashanth Swaminathan .byte 0 /* Augmentation size */ 969*1fd5a2e1SPrashanth Swaminathan ADV(UW28, UW27) 970*1fd5a2e1SPrashanth Swaminathan .byte 0xe, closure_FS+4 /* DW_CFA_def_cfa_offset */ 971*1fd5a2e1SPrashanth Swaminathan#ifdef FFI_CLOSURE_CALL_INNER_SAVE_EBX 972*1fd5a2e1SPrashanth Swaminathan ADV(UW29, UW28) 973*1fd5a2e1SPrashanth Swaminathan .byte 0x80+3, (40-(closure_FS+4))/-4 /* DW_CFA_offset %ebx */ 974*1fd5a2e1SPrashanth Swaminathan ADV(UW30, UW29) 975*1fd5a2e1SPrashanth Swaminathan .byte 0xc0+3 /* DW_CFA_restore %ebx */ 976*1fd5a2e1SPrashanth Swaminathan#endif 977*1fd5a2e1SPrashanth Swaminathan .balign 4 978*1fd5a2e1SPrashanth SwaminathanL(EFDE7): 979*1fd5a2e1SPrashanth Swaminathan 980*1fd5a2e1SPrashanth Swaminathan#if !FFI_NO_RAW_API 981*1fd5a2e1SPrashanth Swaminathan .set L(set8),L(EFDE8)-L(SFDE8) 982*1fd5a2e1SPrashanth Swaminathan .long L(set8) /* FDE Length */ 983*1fd5a2e1SPrashanth SwaminathanL(SFDE8): 984*1fd5a2e1SPrashanth Swaminathan .long L(SFDE8)-L(CIE) /* FDE CIE offset */ 985*1fd5a2e1SPrashanth Swaminathan .long PCREL(L(UW32)) /* Initial location */ 986*1fd5a2e1SPrashanth Swaminathan .long L(UW40)-L(UW32) /* Address range */ 987*1fd5a2e1SPrashanth Swaminathan .byte 0 /* Augmentation size */ 988*1fd5a2e1SPrashanth Swaminathan ADV(UW33, UW32) 989*1fd5a2e1SPrashanth Swaminathan .byte 0xe, raw_closure_S_FS+4 /* DW_CFA_def_cfa_offset */ 990*1fd5a2e1SPrashanth Swaminathan ADV(UW34, UW33) 991*1fd5a2e1SPrashanth Swaminathan .byte 0x80+3, 2 /* DW_CFA_offset %ebx 2*-4 */ 992*1fd5a2e1SPrashanth Swaminathan ADV(UW35, UW34) 993*1fd5a2e1SPrashanth Swaminathan .byte 0xc0+3 /* DW_CFA_restore %ebx */ 994*1fd5a2e1SPrashanth Swaminathan ADV(UW36, UW35) 995*1fd5a2e1SPrashanth Swaminathan .byte 0xe, 4 /* DW_CFA_def_cfa_offset */ 996*1fd5a2e1SPrashanth Swaminathan ADV(UW37, UW36) 997*1fd5a2e1SPrashanth Swaminathan .byte 0xe, raw_closure_S_FS+4 /* DW_CFA_def_cfa_offset */ 998*1fd5a2e1SPrashanth Swaminathan ADV(UW38, UW37) 999*1fd5a2e1SPrashanth Swaminathan .byte 0xe, 4 /* DW_CFA_def_cfa_offset */ 1000*1fd5a2e1SPrashanth Swaminathan ADV(UW39, UW38) 1001*1fd5a2e1SPrashanth Swaminathan .byte 0xe, raw_closure_S_FS+4 /* DW_CFA_def_cfa_offset */ 1002*1fd5a2e1SPrashanth Swaminathan .balign 4 1003*1fd5a2e1SPrashanth SwaminathanL(EFDE8): 1004*1fd5a2e1SPrashanth Swaminathan 1005*1fd5a2e1SPrashanth Swaminathan .set L(set9),L(EFDE9)-L(SFDE9) 1006*1fd5a2e1SPrashanth Swaminathan .long L(set9) /* FDE Length */ 1007*1fd5a2e1SPrashanth SwaminathanL(SFDE9): 1008*1fd5a2e1SPrashanth Swaminathan .long L(SFDE9)-L(CIE) /* FDE CIE offset */ 1009*1fd5a2e1SPrashanth Swaminathan .long PCREL(L(UW41)) /* Initial location */ 1010*1fd5a2e1SPrashanth Swaminathan .long L(UW52)-L(UW41) /* Address range */ 1011*1fd5a2e1SPrashanth Swaminathan .byte 0 /* Augmentation size */ 1012*1fd5a2e1SPrashanth Swaminathan ADV(UW42, UW41) 1013*1fd5a2e1SPrashanth Swaminathan .byte 0xe, 0 /* DW_CFA_def_cfa_offset */ 1014*1fd5a2e1SPrashanth Swaminathan .byte 0x9, 8, 2 /* DW_CFA_register %eip, %edx */ 1015*1fd5a2e1SPrashanth Swaminathan ADV(UW43, UW42) 1016*1fd5a2e1SPrashanth Swaminathan .byte 0xe, 4 /* DW_CFA_def_cfa_offset */ 1017*1fd5a2e1SPrashanth Swaminathan ADV(UW44, UW43) 1018*1fd5a2e1SPrashanth Swaminathan .byte 0xe, 8 /* DW_CFA_def_cfa_offset */ 1019*1fd5a2e1SPrashanth Swaminathan .byte 0x80+8, 2 /* DW_CFA_offset %eip 2*-4 */ 1020*1fd5a2e1SPrashanth Swaminathan ADV(UW45, UW44) 1021*1fd5a2e1SPrashanth Swaminathan .byte 0xe, raw_closure_T_FS+8 /* DW_CFA_def_cfa_offset */ 1022*1fd5a2e1SPrashanth Swaminathan ADV(UW46, UW45) 1023*1fd5a2e1SPrashanth Swaminathan .byte 0x80+3, 3 /* DW_CFA_offset %ebx 3*-4 */ 1024*1fd5a2e1SPrashanth Swaminathan ADV(UW47, UW46) 1025*1fd5a2e1SPrashanth Swaminathan .byte 0xc0+3 /* DW_CFA_restore %ebx */ 1026*1fd5a2e1SPrashanth Swaminathan ADV(UW48, UW47) 1027*1fd5a2e1SPrashanth Swaminathan .byte 0xe, 8 /* DW_CFA_def_cfa_offset */ 1028*1fd5a2e1SPrashanth Swaminathan ADV(UW49, UW48) 1029*1fd5a2e1SPrashanth Swaminathan .byte 0xe, raw_closure_T_FS+8 /* DW_CFA_def_cfa_offset */ 1030*1fd5a2e1SPrashanth Swaminathan ADV(UW50, UW49) 1031*1fd5a2e1SPrashanth Swaminathan .byte 0xe, 8 /* DW_CFA_def_cfa_offset */ 1032*1fd5a2e1SPrashanth Swaminathan ADV(UW51, UW50) 1033*1fd5a2e1SPrashanth Swaminathan .byte 0xe, raw_closure_T_FS+8 /* DW_CFA_def_cfa_offset */ 1034*1fd5a2e1SPrashanth Swaminathan .balign 4 1035*1fd5a2e1SPrashanth SwaminathanL(EFDE9): 1036*1fd5a2e1SPrashanth Swaminathan#endif /* !FFI_NO_RAW_API */ 1037*1fd5a2e1SPrashanth Swaminathan 1038*1fd5a2e1SPrashanth Swaminathan#ifdef _WIN32 1039*1fd5a2e1SPrashanth Swaminathan .def @feat.00; 1040*1fd5a2e1SPrashanth Swaminathan .scl 3; 1041*1fd5a2e1SPrashanth Swaminathan .type 0; 1042*1fd5a2e1SPrashanth Swaminathan .endef 1043*1fd5a2e1SPrashanth Swaminathan .globl @feat.00 1044*1fd5a2e1SPrashanth Swaminathan@feat.00 = 1 1045*1fd5a2e1SPrashanth Swaminathan#endif 1046*1fd5a2e1SPrashanth Swaminathan 1047*1fd5a2e1SPrashanth Swaminathan#ifdef __APPLE__ 1048*1fd5a2e1SPrashanth Swaminathan .subsections_via_symbols 1049*1fd5a2e1SPrashanth Swaminathan .section __LD,__compact_unwind,regular,debug 1050*1fd5a2e1SPrashanth Swaminathan 1051*1fd5a2e1SPrashanth Swaminathan /* compact unwind for ffi_call_i386 */ 1052*1fd5a2e1SPrashanth Swaminathan .long C(ffi_call_i386) 1053*1fd5a2e1SPrashanth Swaminathan .set L1,L(UW5)-L(UW0) 1054*1fd5a2e1SPrashanth Swaminathan .long L1 1055*1fd5a2e1SPrashanth Swaminathan .long 0x04000000 /* use dwarf unwind info */ 1056*1fd5a2e1SPrashanth Swaminathan .long 0 1057*1fd5a2e1SPrashanth Swaminathan .long 0 1058*1fd5a2e1SPrashanth Swaminathan 1059*1fd5a2e1SPrashanth Swaminathan /* compact unwind for ffi_go_closure_EAX */ 1060*1fd5a2e1SPrashanth Swaminathan .long C(ffi_go_closure_EAX) 1061*1fd5a2e1SPrashanth Swaminathan .set L2,L(UW8)-L(UW6) 1062*1fd5a2e1SPrashanth Swaminathan .long L2 1063*1fd5a2e1SPrashanth Swaminathan .long 0x04000000 /* use dwarf unwind info */ 1064*1fd5a2e1SPrashanth Swaminathan .long 0 1065*1fd5a2e1SPrashanth Swaminathan .long 0 1066*1fd5a2e1SPrashanth Swaminathan 1067*1fd5a2e1SPrashanth Swaminathan /* compact unwind for ffi_go_closure_ECX */ 1068*1fd5a2e1SPrashanth Swaminathan .long C(ffi_go_closure_ECX) 1069*1fd5a2e1SPrashanth Swaminathan .set L3,L(UW11)-L(UW9) 1070*1fd5a2e1SPrashanth Swaminathan .long L3 1071*1fd5a2e1SPrashanth Swaminathan .long 0x04000000 /* use dwarf unwind info */ 1072*1fd5a2e1SPrashanth Swaminathan .long 0 1073*1fd5a2e1SPrashanth Swaminathan .long 0 1074*1fd5a2e1SPrashanth Swaminathan 1075*1fd5a2e1SPrashanth Swaminathan /* compact unwind for ffi_closure_i386 */ 1076*1fd5a2e1SPrashanth Swaminathan .long C(ffi_closure_i386) 1077*1fd5a2e1SPrashanth Swaminathan .set L4,L(UW20)-L(UW12) 1078*1fd5a2e1SPrashanth Swaminathan .long L4 1079*1fd5a2e1SPrashanth Swaminathan .long 0x04000000 /* use dwarf unwind info */ 1080*1fd5a2e1SPrashanth Swaminathan .long 0 1081*1fd5a2e1SPrashanth Swaminathan .long 0 1082*1fd5a2e1SPrashanth Swaminathan 1083*1fd5a2e1SPrashanth Swaminathan /* compact unwind for ffi_go_closure_STDCALL */ 1084*1fd5a2e1SPrashanth Swaminathan .long C(ffi_go_closure_STDCALL) 1085*1fd5a2e1SPrashanth Swaminathan .set L5,L(UW23)-L(UW21) 1086*1fd5a2e1SPrashanth Swaminathan .long L5 1087*1fd5a2e1SPrashanth Swaminathan .long 0x04000000 /* use dwarf unwind info */ 1088*1fd5a2e1SPrashanth Swaminathan .long 0 1089*1fd5a2e1SPrashanth Swaminathan .long 0 1090*1fd5a2e1SPrashanth Swaminathan 1091*1fd5a2e1SPrashanth Swaminathan /* compact unwind for ffi_closure_REGISTER */ 1092*1fd5a2e1SPrashanth Swaminathan .long C(ffi_closure_REGISTER) 1093*1fd5a2e1SPrashanth Swaminathan .set L6,L(UW26)-L(UW24) 1094*1fd5a2e1SPrashanth Swaminathan .long L6 1095*1fd5a2e1SPrashanth Swaminathan .long 0x04000000 /* use dwarf unwind info */ 1096*1fd5a2e1SPrashanth Swaminathan .long 0 1097*1fd5a2e1SPrashanth Swaminathan .long 0 1098*1fd5a2e1SPrashanth Swaminathan 1099*1fd5a2e1SPrashanth Swaminathan /* compact unwind for ffi_closure_STDCALL */ 1100*1fd5a2e1SPrashanth Swaminathan .long C(ffi_closure_STDCALL) 1101*1fd5a2e1SPrashanth Swaminathan .set L7,L(UW31)-L(UW27) 1102*1fd5a2e1SPrashanth Swaminathan .long L7 1103*1fd5a2e1SPrashanth Swaminathan .long 0x04000000 /* use dwarf unwind info */ 1104*1fd5a2e1SPrashanth Swaminathan .long 0 1105*1fd5a2e1SPrashanth Swaminathan .long 0 1106*1fd5a2e1SPrashanth Swaminathan 1107*1fd5a2e1SPrashanth Swaminathan /* compact unwind for ffi_closure_raw_SYSV */ 1108*1fd5a2e1SPrashanth Swaminathan .long C(ffi_closure_raw_SYSV) 1109*1fd5a2e1SPrashanth Swaminathan .set L8,L(UW40)-L(UW32) 1110*1fd5a2e1SPrashanth Swaminathan .long L8 1111*1fd5a2e1SPrashanth Swaminathan .long 0x04000000 /* use dwarf unwind info */ 1112*1fd5a2e1SPrashanth Swaminathan .long 0 1113*1fd5a2e1SPrashanth Swaminathan .long 0 1114*1fd5a2e1SPrashanth Swaminathan 1115*1fd5a2e1SPrashanth Swaminathan /* compact unwind for ffi_closure_raw_THISCALL */ 1116*1fd5a2e1SPrashanth Swaminathan .long C(ffi_closure_raw_THISCALL) 1117*1fd5a2e1SPrashanth Swaminathan .set L9,L(UW52)-L(UW41) 1118*1fd5a2e1SPrashanth Swaminathan .long L9 1119*1fd5a2e1SPrashanth Swaminathan .long 0x04000000 /* use dwarf unwind info */ 1120*1fd5a2e1SPrashanth Swaminathan .long 0 1121*1fd5a2e1SPrashanth Swaminathan .long 0 1122*1fd5a2e1SPrashanth Swaminathan#endif /* __APPLE__ */ 1123*1fd5a2e1SPrashanth Swaminathan 1124*1fd5a2e1SPrashanth Swaminathan#endif /* ifndef _MSC_VER */ 1125*1fd5a2e1SPrashanth Swaminathan#endif /* ifdef __i386__ */ 1126*1fd5a2e1SPrashanth Swaminathan 1127*1fd5a2e1SPrashanth Swaminathan#if defined __ELF__ && defined __linux__ 1128*1fd5a2e1SPrashanth Swaminathan .section .note.GNU-stack,"",@progbits 1129*1fd5a2e1SPrashanth Swaminathan#endif 1130