xref: /aosp_15_r20/art/runtime/interpreter/mterp/x86ng/main.S (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
1%def header():
2/*
3 * Copyright (C) 2021 The Android Open Source Project
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 *      http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17
18/*
19 * This is a #include, not a %include, because we want the C pre-processor
20 * to expand the macros into assembler assignment statements.
21 */
22#include "asm_support.h"
23#include "arch/x86/asm_support_x86.S"
24
25/**
26 * x86 ABI general notes:
27 *
28 * Caller save set:
29 *      eax, edx, ecx, st(0)-st(7)
30 * Callee save set:
31 *      ebx, esi, edi, ebp
32 * Return regs:
33 *      32-bit in eax
34 *      64-bit in edx:eax (low-order 32 in eax)
35 *      fp on top of fp stack st(0)
36 *
37 * Stack must be 16-byte aligned to support SSE in native code.
38 */
39
40#define ARG3        %ebx
41#define ARG2        %edx
42#define ARG1        %ecx
43#define ARG0        %eax
44
45/*
46 * single-purpose registers, given names for clarity
47 */
48#define rSELF    %fs
49#define rPC      %esi
50#define CFI_DEX  6  // DWARF register number of the register holding dex-pc (esi).
51#define CFI_TMP  0  // DWARF register number of the first argument register (eax).
52#define rFP      %edi
53#define rINST    %ebx
54#define rINSTw   %bx
55#define rINSTbh  %bh
56#define rINSTbl  %bl
57#define rIBASE   %edx
58#define rREFS    %ebp
59#define CFI_REFS 5 // DWARF register number of the reference array (ebp).
60
61// Temporary registers while setting up a frame.
62#define rNEW_FP   %ecx
63#define rNEW_REFS %eax
64#define CFI_NEW_REFS 0
65
66#define LOCAL0 4
67#define LOCAL1 8
68#define LOCAL2 12
69
70/*
71 * Get/set the 32-bit value from a Dalvik register.
72 */
73#define VREG_ADDRESS(_vreg) (rFP,_vreg,4)
74#define VREG_HIGH_ADDRESS(_vreg) 4(rFP,_vreg,4)
75#define VREG_REF_ADDRESS(_vreg) (rREFS,_vreg,4)
76#define VREG_REF_HIGH_ADDRESS(_vreg) 4(rREFS,_vreg,4)
77
78.macro GET_VREG _reg _vreg
79    movl    VREG_ADDRESS(\_vreg), \_reg
80.endm
81
82.macro GET_VREG_OBJECT _reg _vreg
83    movl    VREG_REF_ADDRESS(\_vreg), \_reg
84.endm
85
86/* Read wide value to xmm. */
87.macro GET_WIDE_FP_VREG _reg _vreg
88    movq    VREG_ADDRESS(\_vreg), \_reg
89.endm
90
91.macro SET_VREG _reg _vreg
92    movl    \_reg, VREG_ADDRESS(\_vreg)
93    movl    MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
94.endm
95
96/* Write wide value from xmm. xmm is clobbered. */
97.macro SET_WIDE_FP_VREG _reg _vreg
98    movq    \_reg, VREG_ADDRESS(\_vreg)
99    pxor    \_reg, \_reg
100    movq    \_reg, VREG_REF_ADDRESS(\_vreg)
101.endm
102
103.macro SET_VREG_OBJECT _reg _vreg
104    movl    \_reg, VREG_ADDRESS(\_vreg)
105    movl    \_reg, VREG_REF_ADDRESS(\_vreg)
106.endm
107
108.macro GET_VREG_HIGH _reg _vreg
109    movl    VREG_HIGH_ADDRESS(\_vreg), \_reg
110.endm
111
112.macro SET_VREG_HIGH _reg _vreg
113    movl    \_reg, VREG_HIGH_ADDRESS(\_vreg)
114    movl    MACRO_LITERAL(0), VREG_REF_HIGH_ADDRESS(\_vreg)
115.endm
116
117.macro CLEAR_REF _vreg
118    movl    MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
119.endm
120
121.macro CLEAR_WIDE_REF _vreg
122    movl    MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
123    movl    MACRO_LITERAL(0), VREG_REF_HIGH_ADDRESS(\_vreg)
124.endm
125
126.macro GET_VREG_XMMs _xmmreg _vreg
127    movss VREG_ADDRESS(\_vreg), \_xmmreg
128.endm
129.macro GET_VREG_XMMd _xmmreg _vreg
130    movsd VREG_ADDRESS(\_vreg), \_xmmreg
131.endm
132.macro SET_VREG_XMMs _xmmreg _vreg
133    movss \_xmmreg, VREG_ADDRESS(\_vreg)
134.endm
135.macro SET_VREG_XMMd _xmmreg _vreg
136    movsd \_xmmreg, VREG_ADDRESS(\_vreg)
137.endm
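// Usage sketch (illustrative only, not emitted by any handler here): with the
// dex register index in a register, e.g. %ecx == 1 and %edx == 0,
//     GET_VREG %eax, %ecx     // %eax <- v1
//     SET_VREG %eax, %edx     // v0 <- %eax, and v0's reference slot is cleared
// SET_VREG_OBJECT must be used instead when the value is a reference, so that
// the matching entry in the reference array (rREFS) stays in sync for the GC.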
138
139// Includes the return address implicitly pushed on stack by 'call'.
140#define CALLEE_SAVES_SIZE (3 * 4 + 1 * 4)
141
142#define PARAMETERS_SAVES_SIZE (4 * 4)
143
144// +4 for the ArtMethod of the caller.
145#define OFFSET_TO_FIRST_ARGUMENT_IN_STACK (CALLEE_SAVES_SIZE + PARAMETERS_SAVES_SIZE + 4)
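// With the values above this is 16 + 16 + 4 = 36 bytes: three callee-saves plus
// the return address, the four spilled core parameter registers (see
// SPILL_ALL_CORE_PARAMETERS below), and the ArtMethod* slot in the caller's frame.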
146
147/*
148 * Refresh rINST.
149 * On entry to a handler, rINST does not contain the opcode number.
150 * However, some utilities require the full 16-bit value, so this macro
151 * restores the opcode number.
152 */
153.macro REFRESH_INST _opnum
154    movb    rINSTbl, rINSTbh
155    movb    $$\_opnum, rINSTbl
156.endm
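// Illustrative example (hypothetical values): if the current 16-bit code unit
// is 0x1290 (opcode 0x90, vAA byte 0x12), the handler is entered with
// rINST == 0x12 (see GOTO_NEXT below); REFRESH_INST 0x90 then rebuilds
// rINSTw == 0x1290, the same layout as the little-endian code unit in memory.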
157
158/*
159 * Fetch the next instruction from rPC into rINSTw.  Does not advance rPC.
160 */
161.macro FETCH_INST
162    movzwl  (rPC), rINST
163.endm
164
165.macro FETCH_INST_CLEAR_OPCODE
166    movzbl 1(rPC), rINST
167.endm
168
169/*
170 * Remove opcode from rINST, compute the address of handler and jump to it.
171 */
172.macro GOTO_NEXT
173    movzx   rINSTbl,%ecx
174    movzbl  rINSTbh,rINST
175    shll    MACRO_LITERAL(${handler_size_bits}), %ecx
176    addl    rIBASE, %ecx
177    jmp     *%ecx
178.endm
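// In effect the dispatch target is rIBASE + (opcode << handler_size_bits):
// rIBASE points at artNterpAsmInstructionStart (see RESTORE_IBASE below) and
// every opcode handler occupies one fixed-size, power-of-two slot.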
179
180/*
181 * Advance rPC by instruction count.
182 */
183.macro ADVANCE_PC _count
184    leal    2*\_count(rPC), rPC
185.endm
186
187/*
188 * Advance rPC by instruction count, fetch instruction and jump to handler.
189 */
190.macro ADVANCE_PC_FETCH_AND_GOTO_NEXT _count
191    ADVANCE_PC \_count
192    FETCH_INST
193    GOTO_NEXT
194.endm
195
196.macro NTERP_DEF_CFA cfi_reg
197    CFI_DEF_CFA_BREG_PLUS_UCONST \cfi_reg, -4, CALLEE_SAVES_SIZE + PARAMETERS_SAVES_SIZE
198.endm
199
200.macro RESTORE_IBASE
201    call 0f
2020:
203    popl rIBASE
204    addl MACRO_LITERAL(SYMBOL(artNterpAsmInstructionStart) - 0b), rIBASE
205.endm
206
207.macro RESTORE_IBASE_WITH_CFA
208    call 0f
2090:
210    CFI_ADJUST_CFA_OFFSET(4)
211    popl rIBASE
212    CFI_ADJUST_CFA_OFFSET(-4)
213    addl MACRO_LITERAL(SYMBOL(artNterpAsmInstructionStart) - 0b), rIBASE
214.endm
215
216.macro SPILL_ALL_CORE_PARAMETERS
217    PUSH_ARG eax
218    PUSH_ARG ecx
219    PUSH_ARG edx
220    PUSH_ARG ebx
221.endm
222
223.macro RESTORE_ALL_CORE_PARAMETERS
224    POP_ARG ebx
225    POP_ARG edx
226    POP_ARG ecx
227    POP_ARG eax
228.endm
229
230.macro DROP_PARAMETERS_SAVES
231    addl $$(PARAMETERS_SAVES_SIZE), %esp
232.endm
233
234.macro SAVE_WIDE_RETURN
235    movl %edx, LOCAL2(%esp)
236.endm
237
238.macro LOAD_WIDE_RETURN reg
239    movl LOCAL2(%esp), \reg
240.endm
241
242// An assembly entry for nterp.
243.macro OAT_ENTRY name
244    FUNCTION_TYPE(\name)
245    ASM_HIDDEN SYMBOL(\name)
246    .global SYMBOL(\name)
247    .balign 16
248SYMBOL(\name):
249.endm
250
251.macro ENTRY name
252    .text
253    ASM_HIDDEN SYMBOL(\name)
254    .global SYMBOL(\name)
255    FUNCTION_TYPE(\name)
256SYMBOL(\name):
257.endm
258
259.macro END name
260    SIZE(\name)
261.endm
262
263// Macro for defining entrypoints into the runtime. We don't need to save registers
264// (we're not holding references there), but there is no
265// kDontSave runtime method. So just use the kSaveRefsOnly runtime method.
266.macro NTERP_TRAMPOLINE name, helper
267DEFINE_FUNCTION \name
268  movd %ebx, %xmm0
269  SETUP_SAVE_REFS_ONLY_FRAME ebx
270  movd %xmm0, %ebx
271  PUSH_ARG ebx
272  PUSH_ARG edx
273  PUSH_ARG ecx
274  PUSH_ARG eax
275  call \helper
276  DECREASE_FRAME 16
277  RESTORE_IBASE_WITH_CFA
278  FETCH_INST_CLEAR_OPCODE
279  RESTORE_SAVE_REFS_ONLY_FRAME
280  cmpl LITERAL(0), %fs:THREAD_EXCEPTION_OFFSET
281  jne nterp_deliver_pending_exception
282  ret
283END_FUNCTION \name
284.endm
285
286.macro CLEAR_VOLATILE_MARKER reg
287  andl MACRO_LITERAL(-2), \reg
288.endm
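// The runtime helpers used by the field opcodes (e.g. nterp_get_static_field
// below) flag volatile fields by setting bit 0 of the value they return; the
// slow paths test that bit, clear it with this macro, and use
// "lock addl $$0, (%esp)" as a full memory barrier around the volatile access.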
289
290.macro EXPORT_PC
291    movl    rPC, -8(rREFS)
292.endm
293
294.macro FETCH_PC
295    movl    -8(rREFS), rPC
296.endm
297
298
299.macro BRANCH
300    leal    (rPC, rINST, 2), rPC
301    // Update method counter and do a suspend check if the branch offset is negative or zero.
302    testl rINST, rINST
303    jle 3f
3042:
305    FETCH_INST
306    GOTO_NEXT
3073:
308    movl (%esp), %eax
309    movzwl ART_METHOD_HOTNESS_COUNT_OFFSET(%eax), %ecx
310#if (NTERP_HOTNESS_VALUE != 0)
311#error Expected 0 for hotness value
312#endif
313    // If the counter is at zero, handle this in the runtime.
314    testw %cx, %cx
315    je NterpHandleHotnessOverflow
316    // Update counter.
317    addl $$-1, %ecx
318    movw %cx, ART_METHOD_HOTNESS_COUNT_OFFSET(%eax)
319    DO_SUSPEND_CHECK continue_label=2b
320.endm
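// Note: rINST holds the signed branch offset in 16-bit code units, so the leal
// above advances rPC by 2*offset bytes. Only zero or backward branches take the
// hotness/suspend-check path; forward branches stay on the fast path.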
321
322// Expects:
323// - edx and eax to be available.
324// Outputs:
325// - \registers contains the dex registers size
326// - \outs contains the outs size
327// - if load_ins is 1, \ins contains the ins
328// - \code_item is replaced with a pointer to the instructions
329.macro FETCH_CODE_ITEM_INFO code_item, registers, outs, ins, load_ins
330    // Fetch dex register size.
331    movzwl CODE_ITEM_REGISTERS_SIZE_OFFSET(\code_item), \registers
332    // Fetch outs size.
333    movzwl CODE_ITEM_OUTS_SIZE_OFFSET(\code_item), \outs
334    .if \load_ins
335    movzwl CODE_ITEM_INS_SIZE_OFFSET(\code_item), \ins
336    .endif
337    addl $$CODE_ITEM_INSNS_OFFSET, \code_item
338.endm
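// Roughly, in C-like pseudocode (field names as implied by the offset macros):
//   registers = code_item->registers_size;
//   outs      = code_item->outs_size;
//   if (load_ins) ins = code_item->ins_size;
//   code_item = code_item->insns;   // now points at the first bytecode unit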
339
340// Set up the stack to start executing the method. Expects:
341// - eax, edx, and ebx to be available.
342//
343// Inputs
344// - code_item: where the code item is
345// - refs: register where the pointer to dex references will be
346// - fp: register where the pointer to dex values will be
347// - cfi_refs: CFI register number of refs
348// - load_ins: whether to store the 'ins' value of the code item in esi
349//
350// Outputs
351// - ebx contains the dex registers size
352// - edx contains the old stack pointer.
353// - \code_item is replaced with a pointer to the instructions
354// - if load_ins is 1, esi contains the ins
355.macro SETUP_STACK_FRAME code_item, refs, fp, cfi_refs, load_ins
356    FETCH_CODE_ITEM_INFO \code_item, %ebx, \refs, %esi, \load_ins
357
358    movl $$3, %eax
359    cmpl $$2, \refs
360    cmovle %eax, \refs
361
362    // Compute required frame size for dex registers: ((2 * ebx) + refs)
363    leal (\refs, %ebx, 2), %edx
364    sall $$2, %edx
365
366    // Compute new stack pointer in fp: add 12 for saving the previous frame,
367    // pc, and method being executed.
368    leal -12(%esp), \fp
369    subl %edx, \fp
370    // Alignment
371    andl $$-16, \fp
372
373    // Now setup the stack pointer.
374    movl %esp, %edx
375    CFI_DEF_CFA_REGISTER(edx)
376    movl \fp, %esp
377
378    leal 12(%esp, \refs, 4), \refs
379    leal (\refs, %ebx, 4), \fp
380
381    // Save old stack pointer.
382    movl %edx, -4(\refs)
383    NTERP_DEF_CFA \cfi_refs
384
385    // Save ArtMethod.
386    movl 12(%edx), %eax
387    movl %eax, (%esp)
388
389    // Put nulls in reference frame.
390    testl %ebx, %ebx
391    je 2f
392    movl \refs, %eax
3931:
394    movl $$0, (%eax)
395    addl $$4, %eax
396    cmpl %eax, \fp
397    jne 1b
3982:
399.endm
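// Frame sketch after this macro, from the new %esp upwards: the ArtMethod* of
// the method about to run at (%esp), space for outgoing arguments, a small
// header holding the caller's stack pointer at -4(\refs) (and, via EXPORT_PC,
// the dex pc at -8(\refs)), then the reference array at \refs and the value
// array at \fp, each ebx (dex register count) slots wide; the reference slots
// were just zero-initialized above.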
400
401// Puts the next floating point argument into the expected register,
402// fetching values based on a non-range invoke.
403// Uses eax as temporary.
404//
405// TODO: We could simplify a lot of code by loading the G argument into
406// the "inst" register. Given that we enter the handler with "1(rPC)" in
407// the rINST, we can just add rINST<<16 to the args and we don't even
408// need to pass "arg_index" around.
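// A shorty is the compact signature of the callee: the first character is the
// return type, followed by one character per argument ('I' int, 'J' long,
// 'F' float, 'D' double, 'L' reference, ...). The ASCII literals compared in
// these loops (68, 70, 74, 76) are 'D', 'F', 'J' and 'L'. For example, an
// (illustrative) shorty "DIJ" describes a method returning a double and taking
// an int and a long.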
409.macro LOOP_OVER_SHORTY_LOADING_XMMS xmm_reg, inst, shorty, arg_index, finished
4101: // LOOP
411    movb (REG_VAR(shorty)), %al             // al := *shorty
412    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
413    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
414    je VAR(finished)
415    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
416    je 2f
417    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
418    je 3f
419    shrl MACRO_LITERAL(4), REG_VAR(inst)
420    addl MACRO_LITERAL(1), REG_VAR(arg_index)
421    //  Handle extra argument in arg array taken by a long.
422    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
423    jne 1b
424    shrl MACRO_LITERAL(4), REG_VAR(inst)
425    addl MACRO_LITERAL(1), REG_VAR(arg_index)
426    jmp 1b                        // goto LOOP
4272:  // FOUND_DOUBLE
428    subl MACRO_LITERAL(8), %esp
429    movl REG_VAR(inst), %eax
430    andl MACRO_LITERAL(0xf), %eax
431    GET_VREG %eax, %eax
432    movl %eax, (%esp)
433    shrl MACRO_LITERAL(4), REG_VAR(inst)
434    addl MACRO_LITERAL(1), REG_VAR(arg_index)
435    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
436    je 5f
437    movl REG_VAR(inst), %eax
438    andl MACRO_LITERAL(0xf), %eax
439    shrl MACRO_LITERAL(4), REG_VAR(inst)
440    addl MACRO_LITERAL(1), REG_VAR(arg_index)
441    jmp 6f
4425:
443    movzbl 1(rPC), %eax
444    andl MACRO_LITERAL(0xf), %eax
4456:
446    GET_VREG %eax, %eax
447    movl %eax, 4(%esp)
448    movq (%esp), REG_VAR(xmm_reg)
449    addl MACRO_LITERAL(8), %esp
450    jmp 4f
4513:  // FOUND_FLOAT
452    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
453    je 7f
454    movl REG_VAR(inst), %eax
455    andl MACRO_LITERAL(0xf), %eax
456    shrl MACRO_LITERAL(4), REG_VAR(inst)
457    addl MACRO_LITERAL(1), REG_VAR(arg_index)
458    jmp 8f
4597:
460    movzbl 1(rPC), %eax
461    andl MACRO_LITERAL(0xf), %eax
4628:
463    GET_VREG_XMMs REG_VAR(xmm_reg), %eax
4644:
465.endm
466
467// Puts the next int/long/object argument in the expected register,
468// fetching values based on a non-range invoke.
469// Uses eax as temporary.
470.macro LOOP_OVER_SHORTY_LOADING_GPRS gpr_reg, gpr_long_reg, inst, shorty, arg_index, finished, if_long, is_ebx
4711: // LOOP
472    movb (REG_VAR(shorty)), %al   // al := *shorty
473    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
474    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
475    je  VAR(finished)
476    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
477    je 2f
478    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
479    je 3f
480    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
481    je 4f
482    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
483    je 7f
484    movl REG_VAR(inst), %eax
485    andl MACRO_LITERAL(0xf), %eax
486    shrl MACRO_LITERAL(4), REG_VAR(inst)
487    addl MACRO_LITERAL(1), REG_VAR(arg_index)
488    jmp 8f
4897:
490    // Fetch PC
491    movl LOCAL1(%esp), %eax
492    movl -8(%eax), %eax
493    movzbl 1(%eax), %eax
494    andl MACRO_LITERAL(0xf), %eax
4958:
496    GET_VREG REG_VAR(gpr_reg), %eax
497    jmp 5f
4982:  // FOUND_LONG
499    .if \is_ebx
500    // Put back shorty and exit
501    subl MACRO_LITERAL(1), REG_VAR(shorty)
502    jmp 5f
503    .else
504    movl REG_VAR(inst), %eax
505    andl MACRO_LITERAL(0xf), %eax
506    GET_VREG REG_VAR(gpr_reg), %eax
507    shrl MACRO_LITERAL(4), REG_VAR(inst)
508    addl MACRO_LITERAL(1), REG_VAR(arg_index)
509    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
510    je 9f
511    movl REG_VAR(inst), %eax
512    andl MACRO_LITERAL(0xf), %eax
513    shrl MACRO_LITERAL(4), REG_VAR(inst)
514    addl MACRO_LITERAL(1), REG_VAR(arg_index)
515    jmp 10f
5169:
517    // Fetch PC
518    movl LOCAL1(%esp), %eax
519    movl -8(%eax), %eax
520    movzbl 1(%eax), %eax
521    andl MACRO_LITERAL(0xf), %eax
52210:
523    GET_VREG REG_VAR(gpr_long_reg), %eax
524    jmp \if_long
525    .endif
5263:  // SKIP_FLOAT
527    shrl MACRO_LITERAL(4), REG_VAR(inst)
528    addl MACRO_LITERAL(1), REG_VAR(arg_index)
529    jmp 1b
5304:  // SKIP_DOUBLE
531    shrl MACRO_LITERAL(8), REG_VAR(inst)
532    addl MACRO_LITERAL(2), REG_VAR(arg_index)
533    jmp 1b
5345:
535.endm
536
537// Puts the next int/long/object argument in the expected stack slot,
538// fetching values based on a non-range invoke.
539// Uses eax as temporary.
540.macro LOOP_OVER_SHORTY_LOADING_INTS stack_offset, shorty, inst, arg_index, finished, is_string_init
5411:  // LOOP
542    movb (REG_VAR(shorty)), %al   // al := *shorty
543    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
544    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
545    je  VAR(finished)
546    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
547    je 2f
548    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
549    je 3f
550    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
551    je 4f
552    .if \is_string_init
553    cmpl MACRO_LITERAL(3), REG_VAR(arg_index)
554    .else
555    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
556    .endif
557    je 7f
558    movl REG_VAR(inst), %eax
559    andl MACRO_LITERAL(0xf), %eax
560    shrl MACRO_LITERAL(4), REG_VAR(inst)
561    jmp 8f
5627:
563    // Fetch PC.
564    movl (LOCAL1 + \stack_offset)(%esp), %eax
565    movl -8(%eax), %eax
566    movzbl 1(%eax), %eax
567    andl MACRO_LITERAL(0xf), %eax
5688:
569    GET_VREG %eax, %eax
570    // Add 4 for the ArtMethod.
571    movl %eax, (4 + \stack_offset)(%esp, REG_VAR(arg_index), 4)
572    addl MACRO_LITERAL(1), REG_VAR(arg_index)
573    jmp 1b
5742:  // FOUND_LONG
575    movl REG_VAR(inst), %eax
576    andl MACRO_LITERAL(0xf), %eax
577    GET_VREG %eax, %eax
578    // Add 4 for the ArtMethod.
579    movl %eax, (4 + \stack_offset)(%esp, REG_VAR(arg_index), 4)
580    shrl MACRO_LITERAL(4), REG_VAR(inst)
581    addl MACRO_LITERAL(1), REG_VAR(arg_index)
582    .if \is_string_init
583    cmpl MACRO_LITERAL(3), REG_VAR(arg_index)
584    .else
585    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
586    .endif
587    je 9f
588    movl REG_VAR(inst), %eax
589    andl MACRO_LITERAL(0xf), %eax
590    shrl MACRO_LITERAL(4), REG_VAR(inst)
591    jmp 10f
5929:
593    // Fetch PC.
594    movl (LOCAL1 + \stack_offset)(%esp), %eax
595    movl -8(%eax), %eax
596    movzbl 1(%eax), %eax
597    andl MACRO_LITERAL(0xf), %eax
59810:
599    GET_VREG %eax, %eax
600    // +4 for the ArtMethod.
601    movl %eax, (4 + \stack_offset)(%esp, REG_VAR(arg_index), 4)
602    addl MACRO_LITERAL(1), REG_VAR(arg_index)
603    jmp 1b
6043:  // SKIP_FLOAT
605    shrl MACRO_LITERAL(4), REG_VAR(inst)
606    addl MACRO_LITERAL(1), REG_VAR(arg_index)
607    jmp 1b
6084:  // SKIP_DOUBLE
609    shrl MACRO_LITERAL(8), REG_VAR(inst)
610    addl MACRO_LITERAL(2), REG_VAR(arg_index)
611    jmp 1b
612.endm
613
614// Puts the next floating point argument into the expected register,
615// fetching values based on a range invoke.
616// Uses eax as temporary.
617.macro LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm_reg, shorty, arg_index, stack_index, finished
6181: // LOOP
619    movb (REG_VAR(shorty)), %al             // al := *shorty
620    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
621    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
622    je VAR(finished)
623    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
624    je 2f
625    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
626    je 3f
627    addl MACRO_LITERAL(1), REG_VAR(arg_index)
628    addl MACRO_LITERAL(1), REG_VAR(stack_index)
629    //  Handle extra argument in arg array taken by a long.
630    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
631    jne 1b
632    addl MACRO_LITERAL(1), REG_VAR(arg_index)
633    addl MACRO_LITERAL(1), REG_VAR(stack_index)
634    jmp 1b                        // goto LOOP
6352:  // FOUND_DOUBLE
636    GET_VREG_XMMd REG_VAR(xmm_reg), REG_VAR(arg_index)
637    addl MACRO_LITERAL(2), REG_VAR(arg_index)
638    addl MACRO_LITERAL(2), REG_VAR(stack_index)
639    jmp 4f
6403:  // FOUND_FLOAT
641    GET_VREG_XMMs REG_VAR(xmm_reg), REG_VAR(arg_index)
642    addl MACRO_LITERAL(1), REG_VAR(arg_index)
643    addl MACRO_LITERAL(1), REG_VAR(stack_index)
6444:
645.endm
646
647// Puts the next floating point argument into the expected stack slot,
648// fetching values based on a range invoke.
649// Uses eax as temporary.
650//
651// TODO: We could just copy all the vregs to the stack slots in a simple loop
652// (or REP MOVSD) without looking at the shorty at all. (We could also drop
653// the "stack_index" from the macros for loading registers.) We could also do
654// that conditionally if argument word count > 3; otherwise we know that all
655// args fit into registers.
656.macro LOOP_RANGE_OVER_FPs shorty, arg_index, stack_index, finished
6571: // LOOP
658    movb (REG_VAR(shorty)), %al             // al := *shorty
659    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
660    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
661    je VAR(finished)
662    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
663    je 2f
664    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
665    je 3f
666    addl MACRO_LITERAL(1), REG_VAR(arg_index)
667    addl MACRO_LITERAL(1), REG_VAR(stack_index)
668    //  Handle extra argument in arg array taken by a long.
669    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
670    jne 1b
671    addl MACRO_LITERAL(1), REG_VAR(arg_index)
672    addl MACRO_LITERAL(1), REG_VAR(stack_index)
673    jmp 1b                        // goto LOOP
6742:  // FOUND_DOUBLE
675    movq (rFP, REG_VAR(arg_index), 4), %xmm4
676    movq %xmm4, 4(%esp, REG_VAR(stack_index), 4)
677    addl MACRO_LITERAL(2), REG_VAR(arg_index)
678    addl MACRO_LITERAL(2), REG_VAR(stack_index)
679    jmp 1b
6803:  // FOUND_FLOAT
681    movl (rFP, REG_VAR(arg_index), 4), %eax
682    movl %eax, 4(%esp, REG_VAR(stack_index), 4)
683    addl MACRO_LITERAL(1), REG_VAR(arg_index)
684    addl MACRO_LITERAL(1), REG_VAR(stack_index)
685    jmp 1b
686.endm
687
688// Puts the next int/long/object argument in the expected register,
689// fetching values based on a range invoke.
690// Uses eax as temporary.
691.macro LOOP_RANGE_OVER_SHORTY_LOADING_GPRS gpr_reg, gpr_long_reg, shorty, arg_index, stack_index, finished, if_long, is_ebx
6921: // LOOP
693    movb (REG_VAR(shorty)), %al             // al := *shorty
694    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
695    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
696    je VAR(finished)
697    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
698    je 2f
699    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
700    je 3f
701    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
702    je 4f
703    movl       (rFP, REG_VAR(arg_index), 4), REG_VAR(gpr_reg)
704    addl MACRO_LITERAL(1), REG_VAR(arg_index)
705    addl MACRO_LITERAL(1), REG_VAR(stack_index)
706    jmp 5f
7072:  // FOUND_LONG
708    .if \is_ebx
709    // Put back shorty and exit
710    subl MACRO_LITERAL(1), REG_VAR(shorty)
711    .else
712    movl (rFP, REG_VAR(arg_index), 4), REG_VAR(gpr_reg)
713    movl 4(rFP, REG_VAR(arg_index), 4), REG_VAR(gpr_long_reg)
714    addl MACRO_LITERAL(2), REG_VAR(arg_index)
715    addl MACRO_LITERAL(2), REG_VAR(stack_index)
716    .endif
717    jmp \if_long
7183:  // SKIP_FLOAT
719    addl MACRO_LITERAL(1), REG_VAR(arg_index)
720    addl MACRO_LITERAL(1), REG_VAR(stack_index)
721    jmp 1b
7224:  // SKIP_DOUBLE
723    addl MACRO_LITERAL(2), REG_VAR(arg_index)
724    addl MACRO_LITERAL(2), REG_VAR(stack_index)
725    jmp 1b
7265:
727.endm
728
729// Puts the next int/long/object argument in the expected stack slot,
730// fetching values based on a range invoke.
731// Uses eax as temporary.
732.macro LOOP_RANGE_OVER_INTs offset, shorty, arg_index, stack_index, finished
7331: // LOOP
734    movb (REG_VAR(shorty)), %al             // al := *shorty
735    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
736    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
737    je  VAR(finished)
738    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
739    je 2f
740    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
741    je 3f
742    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
743    je 4f
744    movl (rFP, REG_VAR(arg_index), 4), %eax
745    // Add 4 for the ArtMethod.
746    movl %eax, (4 + \offset)(%esp, REG_VAR(stack_index), 4)
7473:  // SKIP_FLOAT
748    addl MACRO_LITERAL(1), REG_VAR(arg_index)
749    addl MACRO_LITERAL(1), REG_VAR(stack_index)
750    jmp 1b
7512:  // FOUND_LONG
752    movl (rFP, REG_VAR(arg_index), 4), %eax
753    // Add 4 for the ArtMethod.
754    movl %eax, (4 + \offset)(%esp, REG_VAR(stack_index), 4)
755    movl 4(rFP, REG_VAR(arg_index), 4), %eax
756    // Add 4 for the ArtMethod and 4 for other half.
757    movl %eax, (4 + 4 + \offset)(%esp, REG_VAR(stack_index), 4)
7584:  // SKIP_DOUBLE
759    addl MACRO_LITERAL(2), REG_VAR(arg_index)
760    addl MACRO_LITERAL(2), REG_VAR(stack_index)
761    jmp 1b
762.endm
763
764// Puts the next floating point parameter passed in physical register
765// in the expected dex register array entry.
766// Uses eax as temporary.
767.macro LOOP_OVER_SHORTY_STORING_XMMS xmm_reg, shorty, arg_index, fp, finished
7681: // LOOP
769    movb (REG_VAR(shorty)), %al             // al := *shorty
770    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
771    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
772    je VAR(finished)
773    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
774    je 2f
775    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
776    je 3f
777    addl MACRO_LITERAL(1), REG_VAR(arg_index)
778    //  Handle extra argument in arg array taken by a long.
779    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
780    jne 1b
781    addl MACRO_LITERAL(1), REG_VAR(arg_index)
782    jmp 1b                        // goto LOOP
7832:  // FOUND_DOUBLE
784    movq REG_VAR(xmm_reg),(REG_VAR(fp), REG_VAR(arg_index), 4)
785    addl MACRO_LITERAL(2), REG_VAR(arg_index)
786    jmp 4f
7873:  // FOUND_FLOAT
788    movss REG_VAR(xmm_reg), (REG_VAR(fp), REG_VAR(arg_index), 4)
789    addl MACRO_LITERAL(1), REG_VAR(arg_index)
7904:
791.endm
792
793// Puts the next int/long/object parameter passed in physical register
794// in the expected dex register array entry, and in case of object in the
795// expected reference array entry.
796// Uses eax as temporary.
797.macro LOOP_OVER_SHORTY_STORING_GPRS offset, offset_long, stack_ptr, shorty, arg_index, regs, refs, finished, if_long, is_ebx
7981: // LOOP
799    movb (REG_VAR(shorty)), %al             // al := *shorty
800    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
801    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
802    je  VAR(finished)
803    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
804    je 2f
805    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
806    je 3f
807    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
808    je 4f
809    cmpb MACRO_LITERAL(76), %al   // if (al != 'L') goto NOT_REFERENCE
810    jne 6f
811    movl \offset(REG_VAR(stack_ptr)), %eax
812    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
813    movl %eax, (REG_VAR(refs), REG_VAR(arg_index), 4)
814    addl MACRO_LITERAL(1), REG_VAR(arg_index)
815    jmp 5f
8162:  // FOUND_LONG
817    .if \is_ebx
818    // Put back shorty and jump to \if_long
819    subl MACRO_LITERAL(1), REG_VAR(shorty)
820    .else
821    movl \offset(REG_VAR(stack_ptr)), %eax
822    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
823    movl \offset_long(REG_VAR(stack_ptr)), %eax
824    movl %eax, 4(REG_VAR(regs), REG_VAR(arg_index), 4)
825    addl MACRO_LITERAL(2), REG_VAR(arg_index)
826    .endif
827    jmp \if_long
8283:  // SKIP_FLOAT
829    addl MACRO_LITERAL(1), REG_VAR(arg_index)
830    jmp 1b
8314:  // SKIP_DOUBLE
832    addl MACRO_LITERAL(2), REG_VAR(arg_index)
833    jmp 1b
8346:  // NOT_REFERENCE
835    movl \offset(REG_VAR(stack_ptr)), %eax
836    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
837    addl MACRO_LITERAL(1), REG_VAR(arg_index)
8385:
839.endm
840
841// Puts the next floating point parameter passed in stack
842// in the expected dex register array entry.
843// Uses eax as temporary.
844//
845// TODO: Or we could just spill regs to the reserved slots in the caller's
846// frame and copy all regs in a simple loop. This time, however, we would
847// need to look at the shorty anyway to look for the references.
848// (The trade-off is different for passing arguments and receiving them.)
849.macro LOOP_OVER_FPs shorty, arg_index, regs, stack_ptr, finished
8501: // LOOP
851    movb (REG_VAR(shorty)), %al             // al := *shorty
852    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
853    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
854    je VAR(finished)
855    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
856    je 2f
857    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
858    je 3f
859    addl MACRO_LITERAL(1), REG_VAR(arg_index)
860    //  Handle extra argument in arg array taken by a long.
861    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
862    jne 1b
863    addl MACRO_LITERAL(1), REG_VAR(arg_index)
864    jmp 1b                        // goto LOOP
8652:  // FOUND_DOUBLE
866    movq OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %xmm4
867    movq %xmm4, (REG_VAR(regs), REG_VAR(arg_index), 4)
868    addl MACRO_LITERAL(2), REG_VAR(arg_index)
869    jmp 1b
8703:  // FOUND_FLOAT
871    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
872    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
873    addl MACRO_LITERAL(1), REG_VAR(arg_index)
874    jmp 1b
875.endm
876
877// Puts the next int/long/object parameter passed in stack
878// in the expected dex register array entry, and in case of object in the
879// expected reference array entry.
880// Uses eax as temporary.
881.macro LOOP_OVER_INTs shorty, arg_index, regs, refs, stack_ptr, finished
8821: // LOOP
883    movb (REG_VAR(shorty)), %al             // al := *shorty
884    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
885    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
886    je  VAR(finished)
887    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
888    je 2f
889    cmpb MACRO_LITERAL(76), %al   // if (al == 'L') goto FOUND_REFERENCE
890    je 6f
891    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
892    je 3f
893    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
894    je 4f
895    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
896    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
897    addl MACRO_LITERAL(1), REG_VAR(arg_index)
898    jmp 1b
8996:  // FOUND_REFERENCE
900    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
901    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
902    movl %eax, (REG_VAR(refs), REG_VAR(arg_index), 4)
9033:  // SKIP_FLOAT
904    addl MACRO_LITERAL(1), REG_VAR(arg_index)
905    jmp 1b
9062:  // FOUND_LONG
907    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
908    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
909    movl (OFFSET_TO_FIRST_ARGUMENT_IN_STACK+4)(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
910    movl %eax, 4(REG_VAR(regs), REG_VAR(arg_index), 4)
9114:  // SKIP_DOUBLE
912    addl MACRO_LITERAL(2), REG_VAR(arg_index)
913    jmp 1b
914.endm
915
916// Increase method hotness and do a suspend check before starting to execute the method.
917.macro START_EXECUTING_INSTRUCTIONS
918   movl (%esp), %eax
919   movzwl ART_METHOD_HOTNESS_COUNT_OFFSET(%eax), %ecx
920#if (NTERP_HOTNESS_VALUE != 0)
921#error Expected 0 for hotness value
922#endif
923   // If the counter is at zero, handle this in the runtime.
924   testl %ecx, %ecx
925   je 3f
926   // Update counter.
927   addl $$-1, %ecx
928   movw %cx, ART_METHOD_HOTNESS_COUNT_OFFSET(%eax)
9291:
930   testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), rSELF:THREAD_FLAGS_OFFSET
931   jz 2f
932   EXPORT_PC
933   call SYMBOL(art_quick_test_suspend)
934   RESTORE_IBASE
9352:
936   FETCH_INST
937   GOTO_NEXT
9383:
939   CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot=4f, if_not_hot=1b
9404:
941   movl $$0, ARG1
942   movl rFP, ARG2
943   call nterp_hot_method
944   jmp 2b
945.endm
946
947.macro SPILL_ALL_CALLEE_SAVES
948    PUSH edi
949    PUSH esi
950    PUSH ebp
951.endm
952
953.macro RESTORE_ALL_CALLEE_SAVES
954    POP ebp
955    POP esi
956    POP edi
957.endm
958
959.macro GET_SHORTY dest, is_interface, is_polymorphic, is_custom
960   // Save eax (ArtMethod), ecx (potential this).
961   push %eax
962   push %ecx
963   .if \is_polymorphic
964   push rPC
965   push 12(%esp)
966   call SYMBOL(NterpGetShortyFromInvokePolymorphic)
967   addl MACRO_LITERAL(8), %esp
968   .elseif \is_custom
969   push rPC
970   push 12(%esp)
971   call SYMBOL(NterpGetShortyFromInvokeCustom)
972   addl MACRO_LITERAL(8), %esp
973   .elseif \is_interface
974   subl MACRO_LITERAL(16), %esp
975   // Save interface method.
976   movss %xmm7, (%esp)
977   movzwl 2(rPC), %eax
978   pushl %eax
979   // Caller is at 8 (saved ArtMethod + ecx) + 16 + 4 (second argument)
980   pushl 28(%esp)
981   call SYMBOL(NterpGetShortyFromMethodId)
982   // Restore interface method.
983   movss 8(%esp), %xmm7
984   addl MACRO_LITERAL(24), %esp
985   .else
986   subl MACRO_LITERAL(4), %esp  // Alignment
987   push %eax
988   call SYMBOL(NterpGetShorty)
989   addl MACRO_LITERAL(8), %esp
990   .endif
991   movl %eax, \dest
992   pop %ecx
993   pop %eax
994.endm
995
996.macro GET_SHORTY_SLOW_PATH dest, is_interface
997   // Save all registers that can hold arguments in the fast path.
998   pushl %eax
999   pushl %ecx
1000   pushl %edx
1001   subl MACRO_LITERAL(4), %esp
1002   movss %xmm0, (%esp)
1003   .if \is_interface
1004   // Alignment.
1005   subl MACRO_LITERAL(8), %esp
1006   movzwl 2(rPC), %eax
1007   pushl %eax
1008   // Caller is at 16 (parameters) + 8 (alignment) + 4 (second argument).
1009   pushl 28(%esp)
1010   call SYMBOL(NterpGetShortyFromMethodId)
1011   movl %eax, \dest
1012   movss 16(%esp), %xmm0
1013   addl MACRO_LITERAL(20), %esp
1014   .else
1015   // Alignment.
1016   subl MACRO_LITERAL(12), %esp
1017   pushl %eax
1018   call SYMBOL(NterpGetShorty)
1019   movl %eax, \dest
1020   movss 16(%esp), %xmm0
1021   addl MACRO_LITERAL(20), %esp
1022   .endif
1023   popl %edx
1024   popl %ecx
1025   popl %eax
1026.endm
1027
1028// Uses ecx and edx as temporaries.
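// A string-init call returns the actual String object from the StringFactory;
// every dex register (and its reference-array slot) that still holds the
// original uninitialized "this" must be updated to the new object, which is
// what this loop over [rREFS, rFP) does.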
1029.macro UPDATE_REGISTERS_FOR_STRING_INIT old_value, new_value
1030   movl rREFS, %edx
1031   movl rFP, %ecx
10321:
1033   cmpl (%edx), \old_value
1034   jne 2f
1035   movl \new_value, (%edx)
1036   movl \new_value, (%ecx)
10372:
1038   addl $$4, %edx
1039   addl $$4, %ecx
1040   cmpl %edx, rFP
1041   jne 1b
1042.endm
1043
1044.macro DO_CALL is_polymorphic, is_custom
1045   .if \is_polymorphic
1046   call SYMBOL(art_quick_invoke_polymorphic)
1047   .elseif \is_custom
1048   call SYMBOL(art_quick_invoke_custom)
1049   .else
1050   call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)
1051   .endif
1052.endm
1053
1054.macro COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, suffix="", is_string_init=0, is_polymorphic=0, is_custom=0
1055   .if \is_polymorphic
1056   // No fast path for polymorphic calls.
1057   .elseif \is_custom
1058   // No fast path for custom calls.
1059   .elseif \is_string_init
1060   // No fast path for string.init.
1061   .else
1062     testl $$ART_METHOD_NTERP_INVOKE_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
1063     je .Lfast_path_with_few_args_\suffix
1064     movzbl 1(rPC), %edx
1065     movl %edx, %ebx
1066     shrl MACRO_LITERAL(4), %ebx # Number of arguments
1067     .if \is_static
1068     jz .Linvoke_fast_path_\suffix  # shrl sets the Z flag
1069     .else
1070     cmpl MACRO_LITERAL(1), %ebx
1071     je .Linvoke_fast_path_\suffix
1072     .endif
1073     movzwl 4(rPC), %esi
1074     cmpl MACRO_LITERAL(2), %ebx
1075     .if \is_static
1076     jl .Lone_arg_fast_path_\suffix
1077     .endif
1078     je .Ltwo_args_fast_path_\suffix
1079     cmpl MACRO_LITERAL(4), %ebx
1080     jl .Lthree_args_fast_path_\suffix
1081     je .Lfour_args_fast_path_\suffix
1082
1083     andl        MACRO_LITERAL(0xf), %edx
1084     GET_VREG    %edx, %edx
1085     movl        %edx, (4 + 4 * 4)(%esp)
1086.Lfour_args_fast_path_\suffix:
1087     movl        %esi, %edx
1088     shrl        MACRO_LITERAL(12), %edx
1089     GET_VREG    %edx, %edx
1090     movl        %edx, (4 + 3 * 4)(%esp)
1091.Lthree_args_fast_path_\suffix:
1092     movl        %esi, %ebx
1093     shrl        MACRO_LITERAL(8), %ebx
1094     andl        MACRO_LITERAL(0xf), %ebx
1095     GET_VREG    %ebx, %ebx
1096.Ltwo_args_fast_path_\suffix:
1097     movl        %esi, %edx
1098     shrl        MACRO_LITERAL(4), %edx
1099     andl        MACRO_LITERAL(0xf), %edx
1100     GET_VREG    %edx, %edx
1101.Lone_arg_fast_path_\suffix:
1102     .if \is_static
1103     andl        MACRO_LITERAL(0xf), %esi
1104     GET_VREG    %ecx, %esi
1105     .else
1106     // First argument already in %ecx.
1107     .endif
1108.Linvoke_fast_path_\suffix:
1109     // Fetch PC before calling for proper stack unwinding.
1110     FETCH_PC
1111     call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // Call the method.
1112     // In case of a long return, save the high half into LOCAL2
1113     SAVE_WIDE_RETURN
1114     RESTORE_IBASE
1115     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1116
1117.Lfast_path_with_few_args_\suffix:
1118     // Fast path when we have zero or one argument (modulo 'this'). If there
1119     // is one argument, we can put it in both floating point and core register.
1120     movzbl 1(rPC), %edx
1121     shrl MACRO_LITERAL(4), %edx # Number of arguments
1122     .if \is_static
1123     cmpl MACRO_LITERAL(1), %edx
1124     jl .Linvoke_with_few_args_\suffix
1125     jne .Lget_shorty_\suffix
1126     movzwl 4(rPC), %ecx
1127     andl MACRO_LITERAL(0xf), %ecx  // dex register of first argument
1128     GET_VREG %ecx, %ecx
1129     movd %ecx, %xmm0
1130     .else
1131     cmpl MACRO_LITERAL(2), %edx
1132     jl .Linvoke_with_few_args_\suffix
1133     jne .Lget_shorty_\suffix
1134     movzwl 4(rPC), %edx
1135     shrl MACRO_LITERAL(4), %edx
1136     andl MACRO_LITERAL(0xf), %edx  // dex register of second argument
1137     GET_VREG %edx, %edx
1138     movd %edx, %xmm0
1139     .endif
1140.Linvoke_with_few_args_\suffix:
1141     // Check if the next instruction is move-result or move-result-wide.
1142     // If it is, we fetch the shorty and jump to the regular invocation.
1143     movzwl  6(rPC), %ebx
1144     andl MACRO_LITERAL(0xfe), %ebx
1145     cmpl MACRO_LITERAL(0x0a), %ebx
1146     je .Lget_shorty_and_invoke_\suffix
1147     call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // Call the method.
1148     RESTORE_IBASE
1149     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1150.Lget_shorty_and_invoke_\suffix:
1151     GET_SHORTY_SLOW_PATH %esi, \is_interface
1152     jmp .Lgpr_setup_finished_\suffix
1153   .endif
1154
1155.Lget_shorty_\suffix:
1156   GET_SHORTY %ebx, \is_interface, \is_polymorphic, \is_custom
1157   movl %eax, LOCAL0(%esp)
1158   movl %ebp, LOCAL1(%esp)
1159   movl %ebx, LOCAL2(%esp)
1160   // From this point:
1161   // - ebx contains shorty (in callee-save to switch over return value after call).
1162   // - eax, edx, and ebp are available
1163   // - ecx contains 'this' pointer for instance method.
1164   // TODO: ebp/rREFS is used for stack unwinding, can we find a way to preserve it?
1165   leal 1(%ebx), %edx  // shorty + 1  ; ie skip return arg character
1166   movzwl 4(rPC), %ebx // arguments
1167   .if \is_string_init
1168   shrl MACRO_LITERAL(4), %ebx
1169   movl $$1, %ebp       // ignore first argument
1170   .elseif \is_static
1171   movl $$0, %ebp       // arg_index
1172   .else
1173   shrl MACRO_LITERAL(4), %ebx
1174   movl $$1, %ebp       // arg_index
1175   .endif
1176   LOOP_OVER_SHORTY_LOADING_XMMS xmm0, ebx, edx, ebp, .Lxmm_setup_finished_\suffix
1177   LOOP_OVER_SHORTY_LOADING_XMMS xmm1, ebx, edx, ebp, .Lxmm_setup_finished_\suffix
1178   LOOP_OVER_SHORTY_LOADING_XMMS xmm2, ebx, edx, ebp, .Lxmm_setup_finished_\suffix
1179   LOOP_OVER_SHORTY_LOADING_XMMS xmm3, ebx, edx, ebp, .Lxmm_setup_finished_\suffix
1180   // We know this can only be a float.
1181   movb (%edx), %al                        // al := *shorty
1182   cmpb MACRO_LITERAL(70), %al             // if (al != 'F') goto finished
1183   jne .Lxmm_setup_finished_\suffix
1184   movzbl 1(rPC), %eax
1185   andl MACRO_LITERAL(0xf), %eax
1186   GET_VREG %eax, %eax
1187   // Add four for the ArtMethod.
1188   movl %eax, 4(%esp, %ebp, 4)
1189   // We know there are no more arguments, so jump to the call.
1190   jmp .Lrestore_saved_values_\suffix
1191.Lxmm_setup_finished_\suffix:
1192   // Reload rREFS for fetching the PC.
1193   movl LOCAL1(%esp), %ebp
1194   // Reload shorty
1195   movl LOCAL2(%esp), %ebx
1196   FETCH_PC
1197   leal 1(%ebx), %ebx  // shorty + 1  ; ie skip return arg character
1198   movzwl 4(rPC), %esi // arguments
1199   .if \is_string_init
1200   movl $$0, %ebp       // arg_index
1201   shrl MACRO_LITERAL(4), %esi
1202   LOOP_OVER_SHORTY_LOADING_GPRS ecx, edx, esi, ebx, ebp, .Lrestore_saved_values_\suffix, .Lif_long_ebx_\suffix, is_ebx=0
1203   .elseif \is_static
1204   movl $$0, %ebp       // arg_index
1205   LOOP_OVER_SHORTY_LOADING_GPRS ecx, edx, esi, ebx, ebp, .Lrestore_saved_values_\suffix, .Lif_long_ebx_\suffix, is_ebx=0
1206   .else
1207   shrl MACRO_LITERAL(4), %esi
1208   movl $$1, %ebp       // arg_index
1209   .endif
1210   // For long argument, store second half in eax to not overwrite the shorty.
1211   LOOP_OVER_SHORTY_LOADING_GPRS edx, eax, esi, ebx, ebp, .Lrestore_saved_values_\suffix, .Lif_long_\suffix, is_ebx=0
1212.Lif_long_ebx_\suffix:
1213   // Store in eax to not overwrite the shorty.
1214   LOOP_OVER_SHORTY_LOADING_GPRS eax, eax, esi, ebx, ebp, .Lrestore_saved_values_\suffix, .Lif_long_\suffix, is_ebx=1
1215.Lif_long_\suffix:
1216   // Save shorty, as LOOP_OVER_SHORTY_LOADING_INTS might overwrite the LOCAL2 slot for a long argument.
1217   pushl LOCAL2(%esp)
1218   pushl %eax
1219   LOOP_OVER_SHORTY_LOADING_INTS 8, ebx, esi, ebp, .Lrestore_ebx_\suffix, \is_string_init
1220.Lrestore_ebx_\suffix:
1221   popl %ebx
1222   popl %esi
1223   movl LOCAL0(%esp), %eax
1224   movl LOCAL1(%esp), %ebp
1225   jmp .Lgpr_setup_finished_\suffix
1226.Lrestore_saved_values_\suffix:
1227   movl LOCAL0(%esp), %eax
1228   movl LOCAL1(%esp), %ebp
1229   movl LOCAL2(%esp), %esi
1230.Lgpr_setup_finished_\suffix:
1231   // Look at the shorty now, as we'll want %esi to have the PC for proper stack unwinding
1232   // and we're running out of callee-save registers.
1233   cmpb LITERAL(68), (%esi)       // Test if result type char == 'D'.
1234   je .Linvoke_double_\suffix
1235   cmpb LITERAL(70), (%esi)       // Test if result type char == 'F'.
1236   je .Linvoke_float_\suffix
1237   FETCH_PC
1238   DO_CALL \is_polymorphic, \is_custom
1239   SAVE_WIDE_RETURN
1240.Ldone_return_\suffix:
1241   /* resume execution of caller */
1242   .if \is_string_init
1243   movzwl 4(rPC), %ecx // arguments
1244   andl $$0xf, %ecx
1245   GET_VREG rINST, %ecx
1246   UPDATE_REGISTERS_FOR_STRING_INIT rINST, %eax
1247   .endif
1248   RESTORE_IBASE
1249
1250   .if \is_polymorphic
1251   ADVANCE_PC_FETCH_AND_GOTO_NEXT 4
1252   .else
1253   ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1254   .endif
1255
1256.Linvoke_double_\suffix:
1257   FETCH_PC
1258   DO_CALL \is_polymorphic, \is_custom
1259   movq %xmm0, LOCAL1(%esp)
1260   movl LOCAL1(%esp), %eax
1261   jmp .Ldone_return_\suffix
1262.Linvoke_float_\suffix:
1263   FETCH_PC
1264   DO_CALL \is_polymorphic, \is_custom
1265   movd %xmm0, %eax
1266   jmp .Ldone_return_\suffix
1267.endm
1268
1269.macro COMMON_INVOKE_RANGE is_static=0, is_interface=0, suffix="", is_string_init=0, is_polymorphic=0, is_custom=0
1270   .if \is_polymorphic
1271   // No fast path for polymorphic calls.
1272   .elseif \is_custom
1273   // No fast path for custom calls.
1274   .elseif \is_string_init
1275   // No fast path for string.init.
1276   .else
1277     testl $$ART_METHOD_NTERP_INVOKE_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
1278     je .Lfast_path_with_few_args_range_\suffix
1279     movzbl 1(rPC), %edx  // number of arguments
1280     .if \is_static
1281     testl %edx, %edx
1282     je .Linvoke_fast_path_range_\suffix
1283     .else
1284     cmpl MACRO_LITERAL(1), %edx
1285     je .Linvoke_fast_path_range_\suffix
1286     .endif
1287     movzwl 4(rPC), %ebx  // dex register of first argument
1288     leal (rFP, %ebx, 4), %esi  // location of first dex register value
1289     cmpl MACRO_LITERAL(2), %edx
1290     .if \is_static
1291     jl .Lone_arg_fast_path_range_\suffix
1292     .endif
1293     je .Ltwo_args_fast_path_range_\suffix
1294     cmpl MACRO_LITERAL(4), %edx
1295     jl .Lthree_args_fast_path_range_\suffix
1296
1297.Lloop_over_fast_path_range_\suffix:
1298     subl MACRO_LITERAL(1), %edx
1299     movl (%esi, %edx, 4), %ebx
1300     movl %ebx, 4(%esp, %edx, 4)  // Add 4 for the ArtMethod
1301     cmpl MACRO_LITERAL(3), %edx
1302     jne .Lloop_over_fast_path_range_\suffix
1303
1304.Lthree_args_fast_path_range_\suffix:
1305     movl 8(%esi), %ebx
1306.Ltwo_args_fast_path_range_\suffix:
1307     movl 4(%esi), %edx
1308.Lone_arg_fast_path_range_\suffix:
1309     .if \is_static
1310     movl 0(%esi), %ecx
1311     .else
1312     // First argument already in %ecx.
1313     .endif
1314.Linvoke_fast_path_range_\suffix:
1315     FETCH_PC
1316     call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // Call the method.
1317     SAVE_WIDE_RETURN
1318     RESTORE_IBASE
1319     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1320
1321.Lfast_path_with_few_args_range_\suffix:
1322     // Fast path when we have zero or one argument (modulo 'this'). If there
1323     // is one argument, we can put it in both floating point and core register.
1324     movzbl 1(rPC), %ebx # Number of arguments
1325     .if \is_static
1326     cmpl MACRO_LITERAL(1), %ebx
1327     jl .Linvoke_with_few_args_range_\suffix
1328     jne .Lget_shorty_range_\suffix
1329     movzwl 4(rPC), %ebx  // Dex register of first argument
1330     GET_VREG %ecx, %ebx
1331     movd %ecx, %xmm0
1332     .else
1333     cmpl MACRO_LITERAL(2), %ebx
1334     jl .Linvoke_with_few_args_range_\suffix
1335     jne .Lget_shorty_range_\suffix
1336     movzwl 4(rPC), %ebx
1337     addl MACRO_LITERAL(1), %ebx  // dex register of second argument
1338     GET_VREG %edx, %ebx
1339     movd %edx, %xmm0
1340     .endif
1341.Linvoke_with_few_args_range_\suffix:
1342     // Check if the next instruction is move-result or move-result-wide.
1343     // If it is, we fetch the shorty and jump to the regular invocation.
1344     movzwl  6(rPC), %ebx
1345     andl MACRO_LITERAL(0xfe), %ebx
1346     cmpl MACRO_LITERAL(0x0a), %ebx
1347     je .Lget_shorty_and_invoke_range_\suffix
1348     call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // Call the method.
1349     RESTORE_IBASE
1350     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1351.Lget_shorty_and_invoke_range_\suffix:
1352     GET_SHORTY_SLOW_PATH %esi, \is_interface
1353     jmp .Lgpr_setup_finished_range_\suffix
1354   .endif
1355
1356.Lget_shorty_range_\suffix:
1357   GET_SHORTY %ebx, \is_interface, \is_polymorphic, \is_custom
1358   movl %eax, LOCAL0(%esp)
1359   movl %ebp, LOCAL1(%esp)
1360   movl %ebx, LOCAL2(%esp)
1361   // From this point:
1362   // - ebx contains shorty (in callee-save to switch over return value after call).
1363   // - eax, edx, and ebp are available.
1364   // - ecx contains 'this' pointer for instance method.
1365   // TODO: ebp/rREFS is used for stack unwinding, can we find a way to preserve it?
1366   leal 1(%ebx), %edx  // shorty + 1  ; ie skip return arg character
1367   movzwl 4(rPC), %ebx // arg start index
1368   .if \is_string_init
1369   addl $$1, %ebx       // arg start index
1370   movl $$0, %ebp       // index in stack
1371   .elseif \is_static
1372   movl $$0, %ebp       // index in stack
1373   .else
1374   addl $$1, %ebx       // arg start index
1375   movl $$1, %ebp       // index in stack
1376   .endif
1377   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm0, edx, ebx, ebp, .Lxmm_setup_finished_range_\suffix
1378   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm1, edx, ebx, ebp, .Lxmm_setup_finished_range_\suffix
1379   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm2, edx, ebx, ebp, .Lxmm_setup_finished_range_\suffix
1380   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm3, edx, ebx, ebp, .Lxmm_setup_finished_range_\suffix
1381   LOOP_RANGE_OVER_FPs edx, ebx, ebp, .Lxmm_setup_finished_range_\suffix
1382.Lxmm_setup_finished_range_\suffix:
1383   // Reload rREFS for fetching the PC.
1384   movl LOCAL1(%esp), %ebp
1385   // Reload shorty
1386   movl LOCAL2(%esp), %ebx
1387   FETCH_PC
1388   leal 1(%ebx), %ebx  // shorty + 1  ; ie skip return arg character
1389   // From this point:
1390   // - ebx contains shorty
1391   // - eax and ebp are available.
1392   // - ecx contains 'this' pointer for instance method.
1393   movzwl 4(rPC), %ebp // arg start index
1394   // rPC (esi) is now available
1395   .if \is_string_init
1396   addl $$1, %ebp       // arg start index
1397   movl $$0, %esi       // index in stack
1398   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS ecx, edx, ebx, ebp, esi, .Lrestore_saved_values_range_\suffix, .Lif_long_ebx_range_\suffix, is_ebx=0
1399   .elseif \is_static
1400   movl $$0, %esi // index in stack
1401   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS ecx, edx, ebx, ebp, esi, .Lrestore_saved_values_range_\suffix, .Lif_long_ebx_range_\suffix, is_ebx=0
1402   .else
1403   addl $$1, %ebp // arg start index
1404   movl $$1, %esi // index in stack
1405   .endif
1406   // For long argument, store second half in eax to not overwrite the shorty.
1407   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS edx, eax, ebx, ebp, esi, .Lrestore_saved_values_range_\suffix, .Lif_long_range_\suffix, is_ebx=0
1408.Lif_long_ebx_range_\suffix:
1409   // Store in eax to not overwrite the shorty.
1410   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS eax, eax, ebx, ebp, esi, .Lrestore_saved_values_range_\suffix, .Lif_long_range_\suffix, is_ebx=1
1411.Lif_long_range_\suffix:
1412   // Save shorty, as LOOP_RANGE_OVER_SHORTY_LOADING_INTS might overwrite the LOCAL2 slot for a long argument.
1413   pushl LOCAL2(%esp)
1414   pushl %eax
1415   LOOP_RANGE_OVER_INTs 8, ebx, ebp, esi, .Lrestore_ebx_range_\suffix
1416.Lrestore_ebx_range_\suffix:
1417   popl %ebx
1418   popl %esi
1419   movl LOCAL0(%esp), %eax
1420   movl LOCAL1(%esp), %ebp
1421   jmp .Lgpr_setup_finished_range_\suffix
1422
1423.Lrestore_saved_values_range_\suffix:
1424   movl LOCAL0(%esp), %eax
1425   movl LOCAL1(%esp), %ebp
1426   // Save shorty in callee-save register
1427   movl LOCAL2(%esp), %esi
1428
1429.Lgpr_setup_finished_range_\suffix:
1430   cmpb LITERAL(68), (%esi)       // Test if result type char == 'D'.
1431   je .Lreturn_range_double_\suffix
1432   cmpb LITERAL(70), (%esi)       // Test if result type char == 'F'.
1433   je .Lreturn_range_float_\suffix
1434
1435   FETCH_PC
1436   DO_CALL \is_polymorphic, \is_custom
1437   SAVE_WIDE_RETURN
1438.Ldone_return_range_\suffix:
1439   /* resume execution of caller */
1440   .if \is_string_init
1441   movzwl 4(rPC), %ecx // arguments
1442   GET_VREG rINST, %ecx
1443   UPDATE_REGISTERS_FOR_STRING_INIT rINST, %eax
1444   .endif
1445   RESTORE_IBASE
1446   .if \is_polymorphic
1447   ADVANCE_PC_FETCH_AND_GOTO_NEXT 4
1448   .else
1449   ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1450   .endif
1451.Lreturn_range_double_\suffix:
1452   FETCH_PC
1453   DO_CALL \is_polymorphic, \is_custom
1454   movq %xmm0, LOCAL1(%esp)
1455   movl LOCAL1(%esp), %eax
1456   jmp .Ldone_return_range_\suffix
1457.Lreturn_range_float_\suffix:
1458   FETCH_PC
1459   DO_CALL \is_polymorphic, \is_custom
1460   movd %xmm0, %eax
1461   jmp .Ldone_return_range_\suffix
1462.endm
1463
1464// Helper for static field get.
1465.macro OP_SGET load="movl", wide="0"
1466   // Fast-path which gets the field from thread-local cache.
1467%  fetch_from_thread_cache("%eax", miss_label="2f")
14681:
1469   movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
1470   movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
1471   cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
1472   jne 3f
14734:
1474   .if \wide
1475   addl %ecx, %eax
1476   \load (%eax), %ecx
1477   SET_VREG %ecx, rINST            # fp[A] <- value
1478   \load 4(%eax), %ecx
1479   SET_VREG_HIGH %ecx, rINST
1480   .else
1481   \load (%eax, %ecx, 1), %eax
1482   SET_VREG %eax, rINST            # fp[A] <- value
1483   .endif
1484   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
14852:
1486   EXPORT_PC
1487   movl rSELF:THREAD_SELF_OFFSET, ARG0
1488   movl 0(%esp), ARG1
1489   movl rPC, ARG2
1490   movl $$0, ARG3
1491   call nterp_get_static_field
1492   .if !\wide
1493   CLEAR_VOLATILE_MARKER %eax
1494   jmp 1b
1495   .else
1496   testl MACRO_LITERAL(1), %eax
1497   je 1b
1498   CLEAR_VOLATILE_MARKER %eax
1499   movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
1500   movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
1501   cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
1502   jne 5f
15036:
1504   movsd (%eax, %ecx, 1), %xmm0
1505   SET_WIDE_FP_VREG %xmm0, rINST
1506   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
15075:
1508   call art_quick_read_barrier_mark_reg00
1509   jmp 6b
1510   .endif
15113:
1512   call art_quick_read_barrier_mark_reg00
1513   jmp 4b
1514.endm
1515
1516// Helper for static field put.
1517.macro OP_SPUT rINST_reg="rINST", store="movl", wide="0":
1518   // Fast-path which gets the field from thread-local cache.
1519%  fetch_from_thread_cache("%eax", miss_label="2f")
15201:
1521   movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
1522   movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
1523   cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
1524   jne 3f
15254:
1526   .if \wide
1527   addl %ecx, %eax
1528   GET_VREG %ecx, rINST                  # ecx <- v[A]
1529   movl %ecx, (%eax)
1530   GET_VREG_HIGH %ecx, rINST
1531   movl %ecx, 4(%eax)
1532   .else
1533   GET_VREG rINST, rINST                  # rINST <- v[A]
1534   \store    \rINST_reg, (%eax,%ecx,1)
1535   .endif
1536   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
15372:
1538   EXPORT_PC
1539   movl rSELF:THREAD_SELF_OFFSET, ARG0
1540   movl 0(%esp), ARG1
1541   movl rPC, ARG2
1542   movl $$0, ARG3
1543   call nterp_get_static_field
1544   testl MACRO_LITERAL(1), %eax
1545   je 1b
1546   // Clear the marker that we put for volatile fields.
1547   CLEAR_VOLATILE_MARKER %eax
1548   movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
1549   movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
1550   cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
1551   jne 6f
15525:
1553   .if \wide
1554   addl %ecx, %eax
1555   GET_WIDE_FP_VREG %xmm0, rINST
1556   movsd %xmm0, (%eax)
1557   .else
1558   GET_VREG rINST, rINST                  # rINST <- v[A]
1559   \store    \rINST_reg, (%eax,%ecx,1)
1560   .endif
1561   lock addl $$0, (%esp)
1562   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
15633:
1564   call art_quick_read_barrier_mark_reg00
1565   jmp 4b
15666:
1567   call art_quick_read_barrier_mark_reg00
1568   jmp 5b
1569.endm
1570
1571
1572.macro OP_IPUT_INTERNAL rINST_reg="rINST", store="movl", wide="0", volatile="0":
1573   movzbl  rINSTbl, %ecx                   # ecx <- BA
1574   sarl    $$4, %ecx                       # ecx <- B
1575   GET_VREG %ecx, %ecx                     # vB (object we're operating on)
1576   testl   %ecx, %ecx                      # is object null?
1577   je      common_errNullObject
1578   andb    $$0xf, rINSTbl                  # rINST <- A
1579   .if \wide
1580   addl %ecx, %eax
1581   GET_WIDE_FP_VREG %xmm0, rINST
1582   movsd %xmm0, (%eax)
1583   .else
1584   GET_VREG rINST, rINST                  # rINST <- v[A]
1585   \store \rINST_reg, (%ecx,%eax,1)
1586   .endif
1587.endm
1588
1589// Helper for instance field put.
1590.macro OP_IPUT rINST_reg="rINST", store="movl", wide="0":
1591   // Fast-path which gets the field from thread-local cache.
1592%  fetch_from_thread_cache("%eax", miss_label="2f")
15931:
1594   OP_IPUT_INTERNAL \rINST_reg, \store, \wide, volatile=0
1595   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
15962:
1597   EXPORT_PC
1598   movl rSELF:THREAD_SELF_OFFSET, ARG0
1599   movl 0(%esp), ARG1
1600   movl rPC, ARG2
1601   movl $$0, ARG3
1602   call nterp_get_instance_field_offset
1603   testl %eax, %eax
1604   jns 1b
1605   negl %eax
1606   OP_IPUT_INTERNAL \rINST_reg, \store, \wide, volatile=1
1607   lock addl $$0, (%esp)
1608   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
1609.endm
1610
1611// Helper for instance field get.
.macro OP_IGET load="movl", wide="0"
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%eax", miss_label="2f")
1:
   movl    rINST, %ecx                     # ecx <- BA
   sarl    $$4, %ecx                       # ecx <- B
   GET_VREG %ecx, %ecx                     # vB (object we're operating on)
   testl   %ecx, %ecx                      # is object null?
   je      common_errNullObject
   andb    $$0xf,rINSTbl                   # rINST <- A
   .if \wide
   addl %ecx, %eax
   \load (%eax), %ecx
   SET_VREG %ecx, rINST
   \load 4(%eax), %ecx
   SET_VREG_HIGH %ecx, rINST
   .else
   \load (%ecx,%eax,1), %eax
   SET_VREG %eax, rINST                    # fp[A] <- value
   .endif
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   EXPORT_PC
   movl rSELF:THREAD_SELF_OFFSET, ARG0
   movl 0(%esp), ARG1
   movl rPC, ARG2
   movl $$0, ARG3
   call nterp_get_instance_field_offset
   testl %eax, %eax
   jns 1b
   negl %eax
   .if !\wide
   jmp 1b
   .else
   movl    rINST, %ecx                     # ecx <- BA
   sarl    $$4, %ecx                       # ecx <- B
   GET_VREG %ecx, %ecx                     # vB (object we're operating on)
   testl   %ecx, %ecx                      # is object null?
   je      common_errNullObject
   andb    $$0xf,rINSTbl                   # rINST <- A
   movsd (%eax, %ecx, 1), %xmm0
   SET_WIDE_FP_VREG %xmm0, rINST
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
   .endif
.endm

// Store a reference parameter into our dex register frame.
// Uses xmm4 as temporary.
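// Each invocation copies one 32-bit reference from offset(stack_ptr) into both the
// register array (regs) and the reference array (refs), advances arg_offset, and
// branches to the finished label once the remaining-ins counter (ins) reaches zero.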
.macro SETUP_REFERENCE_PARAMETER_IN_GPR offset, stack_ptr, regs, refs, ins, arg_offset, finished
    movss \offset(REG_VAR(stack_ptr)), %xmm4
    movss %xmm4, (REG_VAR(regs), REG_VAR(arg_offset))
    movss %xmm4, (REG_VAR(refs), REG_VAR(arg_offset))
    addl MACRO_LITERAL(4), REG_VAR(arg_offset)
    subl MACRO_LITERAL(1), REG_VAR(ins)
    je \finished
.endm

// Store a reference parameter into our dex register frame.
// Uses xmm4 as temporary.
.macro SETUP_REFERENCE_PARAMETERS_IN_STACK stack_ptr, regs, refs, ins, arg_offset
1:
    movss OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_offset)), %xmm4
    movss %xmm4, (REG_VAR(regs), REG_VAR(arg_offset))
    movss %xmm4, (REG_VAR(refs), REG_VAR(arg_offset))
    addl MACRO_LITERAL(4), REG_VAR(arg_offset)
    subl MACRO_LITERAL(1), REG_VAR(ins)
    jne 1b
.endm

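// Test the thread's suspend-or-checkpoint flags: branch to continue_label when they are
// clear, otherwise jump to NterpCallSuspendAndGotoNext, which performs the suspend check
// and then resumes dispatch.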
.macro DO_SUSPEND_CHECK continue_label
    testl   $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), rSELF:THREAD_FLAGS_OFFSET
    jz      \continue_label
    jmp     NterpCallSuspendAndGotoNext
.endm

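// Decide how to handle a hotness overflow for the method in %eax: methods that are not
// in shared memory, and intrinsics, always go to if_hot. Other shared-memory methods
// decrement the per-thread shared-method hotness counter (clobbering %ecx) and take
// if_not_hot until that counter reaches zero.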
.macro CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot, if_not_hot
    testl $$ART_METHOD_IS_MEMORY_SHARED_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
    jz \if_hot
    // Intrinsics are always in the boot image and considered hot.
    testl $$ART_METHOD_IS_INTRINSIC_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
    jnz \if_hot
    movzwl rSELF:THREAD_SHARED_METHOD_HOTNESS_OFFSET, %ecx
    testl %ecx, %ecx
    je \if_hot
    addl $$-1, %ecx
    movw %cx, rSELF:THREAD_SHARED_METHOD_HOTNESS_OFFSET
    jmp \if_not_hot
.endm


%def entry():
/*
 * ArtMethod entry point.
 *
 * On entry:
 *  eax   ArtMethod* callee
 *  rest  method parameters
 */

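// Entry point used while the declaring class may not yet be visibly initialized: if it
// is not (and this thread is not the one running its <clinit>), defer to
// art_quick_resolution_trampoline; otherwise fall through to regular nterp execution.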
OAT_ENTRY ExecuteNterpWithClinitImpl
    .cfi_startproc
    PUSH_ARG esi
    // For simplicity, we don't do a read barrier here, but instead rely
    // on art_quick_resolution_trampoline to always have a suspend point before
    // calling back here.
    movl ART_METHOD_DECLARING_CLASS_OFFSET(%eax), %esi
    cmpl $$(MIRROR_CLASS_STATUS_VISIBLY_INITIALIZED_SHIFTED), MIRROR_CLASS_STATUS_OFFSET(%esi)
    jae .Lcontinue_execute_nterp
    cmpl $$(MIRROR_CLASS_STATUS_INITIALIZING_SHIFTED), MIRROR_CLASS_STATUS_OFFSET(%esi)
    jb .Linvoke_trampoline
    movl MIRROR_CLASS_CLINIT_THREAD_ID_OFFSET(%esi), %esi
    cmpl %esi, rSELF:THREAD_TID_OFFSET
    CFI_REMEMBER_STATE
    je .Lcontinue_execute_nterp
.Linvoke_trampoline:
    POP_ARG esi
    jmp art_quick_resolution_trampoline
.Lcontinue_execute_nterp:
    CFI_RESTORE_STATE_AND_DEF_CFA esp, 8
    POP_ARG esi
    jmp ExecuteNterpImpl
    .cfi_endproc
    .global SYMBOL(EndExecuteNterpWithClinitImpl)
SYMBOL(EndExecuteNterpWithClinitImpl):

OAT_ENTRY ExecuteNterpImpl
    .cfi_startproc
    .cfi_def_cfa esp, 4
    testl %eax, -STACK_OVERFLOW_RESERVED_BYTES(%esp)
    // Spill callee save regs
    SPILL_ALL_CALLEE_SAVES

    // Make argument registers available.
    SPILL_ALL_CORE_PARAMETERS

    // Fetch code item.
    movl ART_METHOD_DATA_OFFSET_32(%eax), %ecx

    // Setup the stack for executing the method.
    SETUP_STACK_FRAME %ecx, rREFS, rFP, CFI_REFS, load_ins=1

    // Save the PC
    movl %ecx, -8(rREFS)

    // Setup the parameters
    testl %esi, %esi
    je .Lxmm_setup_finished

    subl %esi, %ebx
    sall $$2, %ebx // ebx is now the offset for inputs into the registers array.

    // Reload ArtMethod.
    movl (%esp), %eax
    testl $$ART_METHOD_NTERP_ENTRY_POINT_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
    je .Lsetup_slow_path
    leal (rREFS, %ebx, 1), %ecx
    leal (rFP, %ebx, 1), %ebx
    movl $$0, %eax

    // edx is the old stack pointer
    SETUP_REFERENCE_PARAMETER_IN_GPR 8, edx, ebx, ecx, esi, eax, .Lxmm_setup_finished
    SETUP_REFERENCE_PARAMETER_IN_GPR 4, edx, ebx, ecx, esi, eax, .Lxmm_setup_finished
    SETUP_REFERENCE_PARAMETER_IN_GPR 0, edx, ebx, ecx, esi, eax, .Lxmm_setup_finished
    SETUP_REFERENCE_PARAMETERS_IN_STACK edx, ebx, ecx, esi, eax
    jmp .Lxmm_setup_finished

.Lsetup_slow_path:
    // If the method is not static and there is one argument ('this'), we don't need to fetch the
    // shorty.
    testl $$ART_METHOD_IS_STATIC_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
    jne .Lsetup_with_shorty

    // Record 'this'.
    movl 8(%edx), %eax
    movl %eax, (rFP, %ebx)
    movl %eax, (rREFS, %ebx)

    cmpl $$1, %esi
    je .Lxmm_setup_finished

.Lsetup_with_shorty:
    // Save xmm registers. Core registers have already been saved.
    subl MACRO_LITERAL(4 * 8), %esp
    movq %xmm0, 0(%esp)
    movq %xmm1, 8(%esp)
    movq %xmm2, 16(%esp)
    movq %xmm3, 24(%esp)
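    // Pass the ArtMethod* (saved at 0(%esp) before the xmm spills) as the single
    // argument to NterpGetShorty; the extra 12-byte adjustment keeps the stack
    // 16-byte aligned across the call.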
    subl MACRO_LITERAL(12), %esp
    pushl (4 * 8 + 12)(%esp)
    call SYMBOL(NterpGetShorty)
    addl MACRO_LITERAL(16), %esp

    // Restore xmm registers
    movq 0(%esp), %xmm0
    movq 8(%esp), %xmm1
    movq 16(%esp), %xmm2
    movq 24(%esp), %xmm3
    addl MACRO_LITERAL(4 * 8), %esp

    // Reload the old stack pointer.
    movl -4(rREFS), %edx
    // TODO: Get shorty in a better way and remove above

    movl $$0, %esi
    movl (%esp), %ecx
    testl $$ART_METHOD_IS_STATIC_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%ecx)

    // Note the leal and movl below don't change the flags.
    leal (rFP, %ebx, 1), %ecx
    leal (rREFS, %ebx, 1), %ebx
    // Save rFP (%edi), we're using it as temporary below.
    movl rFP, LOCAL1(%esp)
    leal 1(%eax), %edi  // shorty + 1; i.e. skip the return type character.
    // Save shorty + 1
    movl %edi, LOCAL2(%esp)
    jne .Lhandle_static_method
    addl $$4, %ecx
    addl $$4, %ebx
    addl $$4, %edx
    LOOP_OVER_SHORTY_STORING_GPRS 0, -4, edx, edi, esi, ecx, ebx, .Lgpr_setup_finished, .Lif_long, is_ebx=0
    LOOP_OVER_SHORTY_STORING_GPRS -4, 0, edx, edi, esi, ecx, ebx, .Lgpr_setup_finished, .Lif_long, is_ebx=1
    jmp .Lif_long
.Lhandle_static_method:
    LOOP_OVER_SHORTY_STORING_GPRS 8, 4, edx, edi, esi, ecx, ebx, .Lgpr_setup_finished, .Lif_long_ebx, is_ebx=0
    LOOP_OVER_SHORTY_STORING_GPRS 4, 0, edx, edi, esi, ecx, ebx, .Lgpr_setup_finished, .Lif_long, is_ebx=0
.Lif_long_ebx:
    LOOP_OVER_SHORTY_STORING_GPRS 0, 0, edx, edi, esi, ecx, ebx, .Lgpr_setup_finished, .Lif_long, is_ebx=1
.Lif_long:
    LOOP_OVER_INTs edi, esi, ecx, ebx, edx, .Lgpr_setup_finished
.Lgpr_setup_finished:
    // Restore shorty + 1
    movl LOCAL2(%esp), %edi
    movl $$0, %esi // reset counter
    LOOP_OVER_SHORTY_STORING_XMMS xmm0, edi, esi, ecx, .Lrestore_fp
    LOOP_OVER_SHORTY_STORING_XMMS xmm1, edi, esi, ecx, .Lrestore_fp
    LOOP_OVER_SHORTY_STORING_XMMS xmm2, edi, esi, ecx, .Lrestore_fp
    LOOP_OVER_SHORTY_STORING_XMMS xmm3, edi, esi, ecx, .Lrestore_fp
    LOOP_OVER_FPs edi, esi, ecx, edx, .Lrestore_fp
.Lrestore_fp:
    movl LOCAL1(%esp), rFP
.Lxmm_setup_finished:
    FETCH_PC
    CFI_DEFINE_DEX_PC_WITH_OFFSET(CFI_TMP, CFI_DEX, 0)
    // Set rIBASE
    RESTORE_IBASE
    /* start executing the instruction at rPC */
    START_EXECUTING_INSTRUCTIONS
    /* NOTE: no fallthrough */
    // cfi info continues, and covers the whole nterp implementation.
    END ExecuteNterpImpl

%def opcode_pre():

%def fetch_from_thread_cache(dest_reg, miss_label):
   // Fetch some information from the thread cache.
   // Uses eax and ecx as temporaries.
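   // The interpreter cache is an array of (dex PC, value) pairs on the Thread object.
   // rPC is shifted and masked into an entry offset, and a hit requires the stored key
   // to equal rPC exactly. Roughly (names illustrative):
   //   entry = thread->interpreter_cache[index(rPC)];
   //   if (entry.key != rPC) goto miss_label;
   //   dest_reg = entry.value;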
   movl rSELF:THREAD_SELF_OFFSET, %eax
   movl rPC, %ecx
   sall MACRO_LITERAL(THREAD_INTERPRETER_CACHE_SIZE_SHIFT), %ecx
   andl MACRO_LITERAL(THREAD_INTERPRETER_CACHE_SIZE_MASK), %ecx
   cmpl THREAD_INTERPRETER_CACHE_OFFSET(%eax, %ecx, 1), rPC
   jne  ${miss_label}
   movl __SIZEOF_POINTER__+THREAD_INTERPRETER_CACHE_OFFSET(%eax, %ecx, 1), ${dest_reg}

%def footer():
/*
 * ===========================================================================
 *  Common subroutines and data
 * ===========================================================================
 */

    .text
    .align  2

// Enclose all code below in a symbol (which gets printed in backtraces).
ENTRY nterp_helper

// Note: mterp also uses the common_* names below for helpers, but that's OK
// as the assembler compiles each interpreter separately.
common_errDivideByZero:
    EXPORT_PC
    call art_quick_throw_div_zero
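    // The art_quick_throw_* entrypoints here and below deliver the exception and do not
    // return to this code.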

// Expect array in eax, index in ecx.
common_errArrayIndex:
    EXPORT_PC
    movl MIRROR_ARRAY_LENGTH_OFFSET(%eax), %edx
    movl %ecx, %eax
    movl %edx, %ecx
    call art_quick_throw_array_bounds

common_errNullObject:
    EXPORT_PC
    call art_quick_throw_null_pointer_exception

NterpCommonInvokeStatic:
    COMMON_INVOKE_NON_RANGE is_static=1, is_interface=0, suffix="invokeStatic"

NterpCommonInvokeStaticRange:
    COMMON_INVOKE_RANGE is_static=1, is_interface=0, suffix="invokeStatic"

NterpCommonInvokeInstance:
    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, suffix="invokeInstance"

NterpCommonInvokeInstanceRange:
    COMMON_INVOKE_RANGE is_static=0, is_interface=0, suffix="invokeInstance"

NterpCommonInvokeInterface:
    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=1, suffix="invokeInterface"

NterpCommonInvokeInterfaceRange:
    COMMON_INVOKE_RANGE is_static=0, is_interface=1, suffix="invokeInterface"

NterpCommonInvokePolymorphic:
    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, is_polymorphic=1, suffix="invokePolymorphic"

NterpCommonInvokePolymorphicRange:
    COMMON_INVOKE_RANGE is_static=0, is_interface=0, is_polymorphic=1, suffix="invokePolymorphic"

NterpCommonInvokeCustom:
    COMMON_INVOKE_NON_RANGE is_static=1, is_interface=0, is_polymorphic=0, is_custom=1, suffix="invokeCustom"

NterpCommonInvokeCustomRange:
    COMMON_INVOKE_RANGE is_static=1, is_interface=0, is_polymorphic=0, is_custom=1, suffix="invokeCustom"

NterpHandleStringInit:
   COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, is_string_init=1, suffix="stringInit"

NterpHandleStringInitRange:
   COMMON_INVOKE_RANGE is_static=0, is_interface=0, is_string_init=1, suffix="stringInit"

NterpNewInstance:
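   // The class comes from the thread-local cache; label 2 resolves the class and
   // allocates the object through nterp_allocate_object on a miss, label 3 runs the
   // read-barrier mark. On the fast path the class in %eax is passed to the thread's
   // object-allocation entrypoint and the result is stored into vA.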
   EXPORT_PC
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("%eax", miss_label="2f")
   cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 3f
4:
   call *rSELF:THREAD_ALLOC_OBJECT_ENTRYPOINT_OFFSET
   RESTORE_IBASE
   FETCH_INST_CLEAR_OPCODE
1:
   SET_VREG_OBJECT %eax, rINST             # fp[A] <- value
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   movl rSELF:THREAD_SELF_OFFSET, ARG0
   movl 0(%esp), ARG1
   movl rPC, ARG2
   call nterp_allocate_object
   jmp 1b
3:
   // 00 is %eax
   call art_quick_read_barrier_mark_reg00
   jmp 4b

NterpNewArray:
   /* new-array vA, vB, class@CCCC */
   EXPORT_PC
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("%eax", miss_label="2f")
   cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 3f
1:
   movzbl  rINSTbl, %ecx
   sarl    $$4, %ecx                         # ecx<- B
   GET_VREG %ecx %ecx                        # ecx<- vB (array length)
   call *rSELF:THREAD_ALLOC_ARRAY_ENTRYPOINT_OFFSET
   RESTORE_IBASE
   FETCH_INST_CLEAR_OPCODE
   andb    $$0xf, rINSTbl                   # rINST<- A
   SET_VREG_OBJECT %eax, rINST              # fp[A] <- value
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   movl rSELF:THREAD_SELF_OFFSET, ARG0
   movl 0(%esp), ARG1
   movl rPC, ARG2
   call nterp_get_class
   jmp 1b
3:
   // 00 is %eax
   call art_quick_read_barrier_mark_reg00
   jmp 1b

NterpPutObjectInstanceField:
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%eax", miss_label="2f")
1:
   movl    rINST, %ecx                     # ecx <- BA
   andl    $$0xf, %ecx                     # ecx <- A
   GET_VREG %ecx, %ecx                     # ecx <- v[A]
   sarl    $$4, rINST
   GET_VREG rINST, rINST                   # vB (object we're operating on)
   testl   rINST, rINST                    # is object null?
   je      common_errNullObject
   POISON_HEAP_REF ecx
   movl %ecx, (rINST, %eax, 1)
   testl %ecx, %ecx
   je 4f
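   // Mark the GC card for the stored reference: the card table base is biased so that
   // its low byte equals the dirty-card value, so storing %al at
   // (base + (object >> CARD_TABLE_CARD_SHIFT)) dirties the object's card.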
   movl rSELF:THREAD_CARD_TABLE_OFFSET, %eax
   shrl $$CARD_TABLE_CARD_SHIFT, rINST
   movb %al, (%eax, rINST, 1)
4:
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   EXPORT_PC
   // Fetch the value, needed by nterp_get_instance_field_offset.
   movl    rINST, %ecx                     # ecx <- BA
   andl    $$0xf, %ecx                     # ecx <- A
   GET_VREG ARG3, %ecx                     # ARG3 <- v[A]
   movl rSELF:THREAD_SELF_OFFSET, ARG0
   movl 0(%esp), ARG1
   movl rPC, ARG2
   call nterp_get_instance_field_offset
   testl %eax, %eax
   jns 1b
   negl %eax
   // Reload the value as it may have moved.
   movl    rINST, %ecx                     # ecx <- BA
   andl    $$0xf, %ecx                     # ecx <- A
   GET_VREG %ecx, %ecx                     # ecx <- v[A]
   sarl    $$4, rINST
   GET_VREG rINST, rINST                   # vB (object we're operating on)
   testl   rINST, rINST                    # is object null?
   je      common_errNullObject
   POISON_HEAP_REF ecx
   movl %ecx, (rINST, %eax, 1)
   testl %ecx, %ecx
   je 5f
   movl rSELF:THREAD_CARD_TABLE_OFFSET, %eax
   shrl $$CARD_TABLE_CARD_SHIFT, rINST
   movb %al, (%eax, rINST, 1)
5:
   lock addl $$0, (%esp)
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2

NterpGetObjectInstanceField:
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%eax", miss_label="2f")
1:
   movl    rINST, %ecx                     # ecx <- BA
   sarl    $$4, %ecx                       # ecx <- B
   GET_VREG %ecx, %ecx                     # vB (object we're operating on)
   testl   %ecx, %ecx                      # is object null?
   je      common_errNullObject
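   // Read-barrier fast path: test the gray byte in the object's header; if it is set,
   // the reference loaded below must be passed to the mark helper at label 3.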
   testb $$READ_BARRIER_TEST_VALUE, GRAY_BYTE_OFFSET(%ecx)
   movl (%ecx,%eax,1), %eax
   jnz 3f
   UNPOISON_HEAP_REF eax  // Affects flags, so we cannot unpoison before the jnz.
4:
   andb    $$0xf,rINSTbl                   # rINST <- A
   SET_VREG_OBJECT %eax, rINST             # fp[A] <- value
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   EXPORT_PC
   movl rSELF:THREAD_SELF_OFFSET, ARG0
   movl 0(%esp), ARG1
   movl rPC, ARG2
   movl $$0, ARG3
   call nterp_get_instance_field_offset
   testl %eax, %eax
   jns 1b
   // For volatile fields, we return a negative offset. Remove the sign;
   // no barrier is needed for the load thanks to the memory model.
   negl %eax
   jmp 1b
3:
   UNPOISON_HEAP_REF eax
   // reg00 is eax
   call art_quick_read_barrier_mark_reg00
   jmp 4b

NterpPutObjectStaticField:
   GET_VREG rINST, rINST
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%eax", miss_label="2f")
1:
   movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
   movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
   cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 3f
5:
   POISON_HEAP_REF ebx  // `rINST` is `%ebx` but we need to pass `ebx`.
   movl rINST, (%eax, %ecx, 1)
   testl rINST, rINST
   je 4f
   movl rSELF:THREAD_CARD_TABLE_OFFSET, %ecx
   shrl $$CARD_TABLE_CARD_SHIFT, %eax
   movb %cl, (%ecx, %eax, 1)
4:
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   EXPORT_PC
   movl rSELF:THREAD_SELF_OFFSET, ARG0
   movl 0(%esp), ARG1
   movl rPC, ARG2
   movl rINST, ARG3
   call nterp_get_static_field
   // Reload the value as it may have moved.
   GET_VREG rINST, rINST
   testl MACRO_LITERAL(1), %eax
   je 1b
   CLEAR_VOLATILE_MARKER %eax
   movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
   movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
   cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 7f
6:
   POISON_HEAP_REF ebx  // `rINST` is `%ebx` but we need to pass `ebx`.
   movl rINST, (%eax, %ecx, 1)
   testl rINST, rINST
   je 8f
   movl rSELF:THREAD_CARD_TABLE_OFFSET, %ecx
   shrl $$CARD_TABLE_CARD_SHIFT, %eax
   movb %cl, (%ecx, %eax, 1)
8:
   lock addl $$0, (%esp)
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
3:
   call art_quick_read_barrier_mark_reg00
   jmp 5b
7:
   call art_quick_read_barrier_mark_reg00
   jmp 6b

NterpGetObjectStaticField:
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%eax", miss_label="2f")
1:
   movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
   movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
   cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 5f
6:
   testb $$READ_BARRIER_TEST_VALUE, GRAY_BYTE_OFFSET(%eax)
   movl (%eax, %ecx, 1), %eax
   jnz 3f
   UNPOISON_HEAP_REF eax  // Affects flags, so we cannot unpoison before the jnz.
4:
   SET_VREG_OBJECT %eax, rINST             # fp[A] <- value
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   EXPORT_PC
   movl rSELF:THREAD_SELF_OFFSET, ARG0
   movl 0(%esp), ARG1
   movl rPC, ARG2
   movl $$0, ARG3
   call nterp_get_static_field
   CLEAR_VOLATILE_MARKER %eax
   jmp 1b
3:
   UNPOISON_HEAP_REF eax
   call art_quick_read_barrier_mark_reg00
   jmp 4b
5:
   call art_quick_read_barrier_mark_reg00
   jmp 6b

NterpGetBooleanStaticField:
  OP_SGET load="movzbl", wide=0

NterpGetByteStaticField:
  OP_SGET load="movsbl", wide=0

NterpGetCharStaticField:
  OP_SGET load="movzwl", wide=0

NterpGetShortStaticField:
  OP_SGET load="movswl", wide=0

NterpGetWideStaticField:
  OP_SGET load="movl", wide=1

NterpGetIntStaticField:
  OP_SGET load="movl", wide=0

NterpPutStaticField:
  OP_SPUT rINST_reg=rINST, store="movl", wide=0

NterpPutBooleanStaticField:
NterpPutByteStaticField:
  OP_SPUT rINST_reg=rINSTbl, store="movb", wide=0

NterpPutCharStaticField:
NterpPutShortStaticField:
  OP_SPUT rINST_reg=rINSTw, store="movw", wide=0

NterpPutWideStaticField:
  OP_SPUT rINST_reg=rINST, store="movl", wide=1

NterpPutInstanceField:
  OP_IPUT rINST_reg=rINST, store="movl", wide=0

NterpPutBooleanInstanceField:
NterpPutByteInstanceField:
  OP_IPUT rINST_reg=rINSTbl, store="movb", wide=0

NterpPutCharInstanceField:
NterpPutShortInstanceField:
  OP_IPUT rINST_reg=rINSTw, store="movw", wide=0

NterpPutWideInstanceField:
  OP_IPUT rINST_reg=rINST, store="movl", wide=1

NterpGetBooleanInstanceField:
  OP_IGET load="movzbl", wide=0

NterpGetByteInstanceField:
  OP_IGET load="movsbl", wide=0

NterpGetCharInstanceField:
  OP_IGET load="movzwl", wide=0

NterpGetShortInstanceField:
  OP_IGET load="movswl", wide=0

NterpGetWideInstanceField:
  OP_IGET load="movl", wide=1

NterpGetInstanceField:
  OP_IGET load="movl", wide=0

NterpCallSuspendAndGotoNext:
    EXPORT_PC
    // Save branch offset.
    movl rINST, LOCAL0(%esp)
    call SYMBOL(art_quick_test_suspend)
    RESTORE_IBASE
    movl LOCAL0(%esp), rINST
    FETCH_INST
    GOTO_NEXT

NterpHandleHotnessOverflow:
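    // Reached when the method's hotness counter overflows. nterp_hot_method may return
    // OSR data; in that case the current nterp frame is dropped, the OSR frame is copied
    // onto the stack, and control transfers to the compiled code (label 3). Otherwise
    // execution resumes at the saved next PC (label 2). Shared-memory methods go through
    // the per-thread hotness counter and a suspend check instead (label 4).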
    CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot=1f, if_not_hot=4f
1:
    movl rPC, %ecx
    movl rFP, ARG2
    // Save next PC.
    movl %ecx, LOCAL0(%esp)
    call nterp_hot_method
    testl %eax, %eax
    jne 3f
    // Fetch next PC.
    mov LOCAL0(%esp), rPC
2:
    FETCH_INST
    GOTO_NEXT
3:
    // Drop the current frame.
    movl -4(rREFS), %esp
    CFI_DEF_CFA(esp, PARAMETERS_SAVES_SIZE+CALLEE_SAVES_SIZE)
    DROP_PARAMETERS_SAVES
    CFI_DEF_CFA(esp, CALLEE_SAVES_SIZE)

    // Setup the new frame
    movl OSR_DATA_FRAME_SIZE(%eax), %ecx
    // The given stack size contains all callee saved registers, so remove them.
    subl $$CALLEE_SAVES_SIZE, %ecx

    // Remember CFA.
    movl %esp, %ebp
    CFI_DEF_CFA_REGISTER(ebp)

    subl %ecx, %esp
    movl %esp, %edi               // edi := beginning of stack
    leal OSR_DATA_MEMORY(%eax), %esi  // esi := memory to copy
    rep movsb                     // while (ecx--) { *edi++ = *esi++ }

    // Fetch the native PC to jump to and save it in stack.
    pushl OSR_DATA_NATIVE_PC(%eax)
    CFI_ADJUST_CFA_OFFSET(4)

    subl MACRO_LITERAL(8), %esp
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %eax
    CFI_ADJUST_CFA_OFFSET(4)
    // Free the memory holding OSR Data.
    call SYMBOL(NterpFree)
    addl MACRO_LITERAL(12), %esp
    CFI_ADJUST_CFA_OFFSET(-12)

    // Jump to the compiled code.
    ret
4:
    DO_SUSPEND_CHECK continue_label=2b


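// Interface calls that resolved to a java.lang.Object method: the vtable index of that
// method is carried in the upper 16 bits of %eax, so load the concrete ArtMethod from
// the vtable of the class in %edx and dispatch it as a virtual call.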
NterpHandleInvokeInterfaceOnObjectMethodRange:
   shrl $$16, %eax
   movl MIRROR_CLASS_VTABLE_OFFSET_32(%edx, %eax, 4), %eax
   jmp NterpCommonInvokeInstanceRange

NterpHandleInvokeInterfaceOnObjectMethod:
   shrl $$16, %eax
   movl MIRROR_CLASS_VTABLE_OFFSET_32(%edx, %eax, 4), %eax
   jmp NterpCommonInvokeInstance

// This is the logical end of ExecuteNterpImpl, where the frame info applies.
// EndExecuteNterpImpl includes the methods below as we want the runtime to
// see them as part of the Nterp PCs.
.cfi_endproc

END nterp_helper

// This is the end of PCs contained by the OatQuickMethodHeader created for the interpreter
// entry point.
    FUNCTION_TYPE(EndExecuteNterpImpl)
    ASM_HIDDEN SYMBOL(EndExecuteNterpImpl)
    .global SYMBOL(EndExecuteNterpImpl)
SYMBOL(EndExecuteNterpImpl):

// Entrypoints into runtime.
NTERP_TRAMPOLINE nterp_get_static_field, NterpGetStaticField
NTERP_TRAMPOLINE nterp_get_instance_field_offset, NterpGetInstanceFieldOffset
NTERP_TRAMPOLINE nterp_filled_new_array, NterpFilledNewArray
NTERP_TRAMPOLINE nterp_filled_new_array_range, NterpFilledNewArrayRange
NTERP_TRAMPOLINE nterp_get_class, NterpGetClass
NTERP_TRAMPOLINE nterp_allocate_object, NterpAllocateObject
NTERP_TRAMPOLINE nterp_get_method, NterpGetMethod
NTERP_TRAMPOLINE nterp_hot_method, NterpHotMethod
NTERP_TRAMPOLINE nterp_load_object, NterpLoadObject

DEFINE_FUNCTION nterp_deliver_pending_exception
    DELIVER_PENDING_EXCEPTION
END_FUNCTION nterp_deliver_pending_exception

// gen_mterp.py will inline the following definitions
// within [ExecuteNterpImpl, EndExecuteNterpImpl).
%def instruction_end():

    FUNCTION_TYPE(artNterpAsmInstructionEnd)
    ASM_HIDDEN SYMBOL(artNterpAsmInstructionEnd)
    .global SYMBOL(artNterpAsmInstructionEnd)
SYMBOL(artNterpAsmInstructionEnd):
    // artNterpAsmInstructionEnd is used as a landing pad for exception handling.
    RESTORE_IBASE
    FETCH_INST
    GOTO_NEXT

%def instruction_start():

    FUNCTION_TYPE(artNterpAsmInstructionStart)
    ASM_HIDDEN SYMBOL(artNterpAsmInstructionStart)
    .global SYMBOL(artNterpAsmInstructionStart)
SYMBOL(artNterpAsmInstructionStart) = .L_op_nop
    .text

%def opcode_name_prefix():
%   return "nterp_"
%def opcode_start():
    ENTRY nterp_${opcode}
%def opcode_end():
    END nterp_${opcode}
    // Advance to the end of this handler. Causes an error if we are past that point.
    .org nterp_${opcode} + NTERP_HANDLER_SIZE  // ${opcode} handler is too big!
%def opcode_slow_path_start(name):
    ENTRY ${name}
%def opcode_slow_path_end(name):
    END ${name}