xref: /aosp_15_r20/external/vixl/src/aarch32/macro-assembler-aarch32.cc (revision f5c631da2f1efdd72b5fd1e20510e4042af13d77)
1*f5c631daSSadaf Ebrahimi // Copyright 2017, VIXL authors
2*f5c631daSSadaf Ebrahimi // All rights reserved.
3*f5c631daSSadaf Ebrahimi //
4*f5c631daSSadaf Ebrahimi // Redistribution and use in source and binary forms, with or without
5*f5c631daSSadaf Ebrahimi // modification, are permitted provided that the following conditions are met:
6*f5c631daSSadaf Ebrahimi //
7*f5c631daSSadaf Ebrahimi //   * Redistributions of source code must retain the above copyright notice,
8*f5c631daSSadaf Ebrahimi //     this list of conditions and the following disclaimer.
9*f5c631daSSadaf Ebrahimi //   * Redistributions in binary form must reproduce the above copyright
10*f5c631daSSadaf Ebrahimi //     notice, this list of conditions and the following disclaimer in the
11*f5c631daSSadaf Ebrahimi //     documentation and/or other materials provided with the distribution.
12*f5c631daSSadaf Ebrahimi //   * Neither the name of ARM Limited nor the names of its contributors may
13*f5c631daSSadaf Ebrahimi //     be used to endorse or promote products derived from this software
14*f5c631daSSadaf Ebrahimi //     without specific prior written permission.
15*f5c631daSSadaf Ebrahimi //
16*f5c631daSSadaf Ebrahimi // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS CONTRIBUTORS "AS IS" AND
17*f5c631daSSadaf Ebrahimi // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18*f5c631daSSadaf Ebrahimi // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19*f5c631daSSadaf Ebrahimi // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
20*f5c631daSSadaf Ebrahimi // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21*f5c631daSSadaf Ebrahimi // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22*f5c631daSSadaf Ebrahimi // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
23*f5c631daSSadaf Ebrahimi // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
24*f5c631daSSadaf Ebrahimi // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
25*f5c631daSSadaf Ebrahimi // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
26*f5c631daSSadaf Ebrahimi // POSSIBILITY OF SUCH DAMAGE.
27*f5c631daSSadaf Ebrahimi 
28*f5c631daSSadaf Ebrahimi #include "aarch32/macro-assembler-aarch32.h"
29*f5c631daSSadaf Ebrahimi 
30*f5c631daSSadaf Ebrahimi #define STRINGIFY(x) #x
31*f5c631daSSadaf Ebrahimi #define TOSTRING(x) STRINGIFY(x)
32*f5c631daSSadaf Ebrahimi 
33*f5c631daSSadaf Ebrahimi #define CONTEXT_SCOPE \
34*f5c631daSSadaf Ebrahimi   ContextScope context(this, __FILE__ ":" TOSTRING(__LINE__))
35*f5c631daSSadaf Ebrahimi 
36*f5c631daSSadaf Ebrahimi namespace vixl {
37*f5c631daSSadaf Ebrahimi namespace aarch32 {
38*f5c631daSSadaf Ebrahimi 
ExactAssemblyScopeWithoutPoolsCheck(MacroAssembler * masm,size_t size,SizePolicy size_policy)39*f5c631daSSadaf Ebrahimi ExactAssemblyScopeWithoutPoolsCheck::ExactAssemblyScopeWithoutPoolsCheck(
40*f5c631daSSadaf Ebrahimi     MacroAssembler* masm, size_t size, SizePolicy size_policy)
41*f5c631daSSadaf Ebrahimi     : ExactAssemblyScope(masm,
42*f5c631daSSadaf Ebrahimi                          size,
43*f5c631daSSadaf Ebrahimi                          size_policy,
44*f5c631daSSadaf Ebrahimi                          ExactAssemblyScope::kIgnorePools) {}
45*f5c631daSSadaf Ebrahimi 
Open(MacroAssembler * masm)46*f5c631daSSadaf Ebrahimi void UseScratchRegisterScope::Open(MacroAssembler* masm) {
47*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ == NULL);
48*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm != NULL);
49*f5c631daSSadaf Ebrahimi   masm_ = masm;
50*f5c631daSSadaf Ebrahimi 
51*f5c631daSSadaf Ebrahimi   old_available_ = masm_->GetScratchRegisterList()->GetList();
52*f5c631daSSadaf Ebrahimi   old_available_vfp_ = masm_->GetScratchVRegisterList()->GetList();
53*f5c631daSSadaf Ebrahimi 
54*f5c631daSSadaf Ebrahimi   parent_ = masm->GetCurrentScratchRegisterScope();
55*f5c631daSSadaf Ebrahimi   masm->SetCurrentScratchRegisterScope(this);
56*f5c631daSSadaf Ebrahimi }
57*f5c631daSSadaf Ebrahimi 
58*f5c631daSSadaf Ebrahimi 
Close()59*f5c631daSSadaf Ebrahimi void UseScratchRegisterScope::Close() {
60*f5c631daSSadaf Ebrahimi   if (masm_ != NULL) {
61*f5c631daSSadaf Ebrahimi     // Ensure that scopes nest perfectly, and do not outlive their parents.
62*f5c631daSSadaf Ebrahimi     // This is a run-time check because the order of destruction of objects in
63*f5c631daSSadaf Ebrahimi     // the _same_ scope is implementation-defined, and is likely to change in
64*f5c631daSSadaf Ebrahimi     // optimised builds.
65*f5c631daSSadaf Ebrahimi     VIXL_CHECK(masm_->GetCurrentScratchRegisterScope() == this);
66*f5c631daSSadaf Ebrahimi     masm_->SetCurrentScratchRegisterScope(parent_);
67*f5c631daSSadaf Ebrahimi 
68*f5c631daSSadaf Ebrahimi     masm_->GetScratchRegisterList()->SetList(old_available_);
69*f5c631daSSadaf Ebrahimi     masm_->GetScratchVRegisterList()->SetList(old_available_vfp_);
70*f5c631daSSadaf Ebrahimi 
71*f5c631daSSadaf Ebrahimi     masm_ = NULL;
72*f5c631daSSadaf Ebrahimi   }
73*f5c631daSSadaf Ebrahimi }
74*f5c631daSSadaf Ebrahimi 
75*f5c631daSSadaf Ebrahimi 
IsAvailable(const Register & reg) const76*f5c631daSSadaf Ebrahimi bool UseScratchRegisterScope::IsAvailable(const Register& reg) const {
77*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
78*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(reg.IsValid());
79*f5c631daSSadaf Ebrahimi   return masm_->GetScratchRegisterList()->Includes(reg);
80*f5c631daSSadaf Ebrahimi }
81*f5c631daSSadaf Ebrahimi 
82*f5c631daSSadaf Ebrahimi 
IsAvailable(const VRegister & reg) const83*f5c631daSSadaf Ebrahimi bool UseScratchRegisterScope::IsAvailable(const VRegister& reg) const {
84*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
85*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(reg.IsValid());
86*f5c631daSSadaf Ebrahimi   return masm_->GetScratchVRegisterList()->IncludesAllOf(reg);
87*f5c631daSSadaf Ebrahimi }
88*f5c631daSSadaf Ebrahimi 
89*f5c631daSSadaf Ebrahimi 
Acquire()90*f5c631daSSadaf Ebrahimi Register UseScratchRegisterScope::Acquire() {
91*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
92*f5c631daSSadaf Ebrahimi   Register reg = masm_->GetScratchRegisterList()->GetFirstAvailableRegister();
93*f5c631daSSadaf Ebrahimi   VIXL_CHECK(reg.IsValid());
94*f5c631daSSadaf Ebrahimi   masm_->GetScratchRegisterList()->Remove(reg);
95*f5c631daSSadaf Ebrahimi   return reg;
96*f5c631daSSadaf Ebrahimi }
97*f5c631daSSadaf Ebrahimi 
98*f5c631daSSadaf Ebrahimi 
AcquireV(unsigned size_in_bits)99*f5c631daSSadaf Ebrahimi VRegister UseScratchRegisterScope::AcquireV(unsigned size_in_bits) {
100*f5c631daSSadaf Ebrahimi   switch (size_in_bits) {
101*f5c631daSSadaf Ebrahimi     case kSRegSizeInBits:
102*f5c631daSSadaf Ebrahimi       return AcquireS();
103*f5c631daSSadaf Ebrahimi     case kDRegSizeInBits:
104*f5c631daSSadaf Ebrahimi       return AcquireD();
105*f5c631daSSadaf Ebrahimi     case kQRegSizeInBits:
106*f5c631daSSadaf Ebrahimi       return AcquireQ();
107*f5c631daSSadaf Ebrahimi     default:
108*f5c631daSSadaf Ebrahimi       VIXL_UNREACHABLE();
109*f5c631daSSadaf Ebrahimi       return NoVReg;
110*f5c631daSSadaf Ebrahimi   }
111*f5c631daSSadaf Ebrahimi }
112*f5c631daSSadaf Ebrahimi 
113*f5c631daSSadaf Ebrahimi 
AcquireQ()114*f5c631daSSadaf Ebrahimi QRegister UseScratchRegisterScope::AcquireQ() {
115*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
116*f5c631daSSadaf Ebrahimi   QRegister reg =
117*f5c631daSSadaf Ebrahimi       masm_->GetScratchVRegisterList()->GetFirstAvailableQRegister();
118*f5c631daSSadaf Ebrahimi   VIXL_CHECK(reg.IsValid());
119*f5c631daSSadaf Ebrahimi   masm_->GetScratchVRegisterList()->Remove(reg);
120*f5c631daSSadaf Ebrahimi   return reg;
121*f5c631daSSadaf Ebrahimi }
122*f5c631daSSadaf Ebrahimi 
123*f5c631daSSadaf Ebrahimi 
AcquireD()124*f5c631daSSadaf Ebrahimi DRegister UseScratchRegisterScope::AcquireD() {
125*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
126*f5c631daSSadaf Ebrahimi   DRegister reg =
127*f5c631daSSadaf Ebrahimi       masm_->GetScratchVRegisterList()->GetFirstAvailableDRegister();
128*f5c631daSSadaf Ebrahimi   VIXL_CHECK(reg.IsValid());
129*f5c631daSSadaf Ebrahimi   masm_->GetScratchVRegisterList()->Remove(reg);
130*f5c631daSSadaf Ebrahimi   return reg;
131*f5c631daSSadaf Ebrahimi }
132*f5c631daSSadaf Ebrahimi 
133*f5c631daSSadaf Ebrahimi 
AcquireS()134*f5c631daSSadaf Ebrahimi SRegister UseScratchRegisterScope::AcquireS() {
135*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
136*f5c631daSSadaf Ebrahimi   SRegister reg =
137*f5c631daSSadaf Ebrahimi       masm_->GetScratchVRegisterList()->GetFirstAvailableSRegister();
138*f5c631daSSadaf Ebrahimi   VIXL_CHECK(reg.IsValid());
139*f5c631daSSadaf Ebrahimi   masm_->GetScratchVRegisterList()->Remove(reg);
140*f5c631daSSadaf Ebrahimi   return reg;
141*f5c631daSSadaf Ebrahimi }
142*f5c631daSSadaf Ebrahimi 
143*f5c631daSSadaf Ebrahimi 
Release(const Register & reg)144*f5c631daSSadaf Ebrahimi void UseScratchRegisterScope::Release(const Register& reg) {
145*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
146*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(reg.IsValid());
147*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(!masm_->GetScratchRegisterList()->Includes(reg));
148*f5c631daSSadaf Ebrahimi   masm_->GetScratchRegisterList()->Combine(reg);
149*f5c631daSSadaf Ebrahimi }
150*f5c631daSSadaf Ebrahimi 
151*f5c631daSSadaf Ebrahimi 
Release(const VRegister & reg)152*f5c631daSSadaf Ebrahimi void UseScratchRegisterScope::Release(const VRegister& reg) {
153*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
154*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(reg.IsValid());
155*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(!masm_->GetScratchVRegisterList()->IncludesAliasOf(reg));
156*f5c631daSSadaf Ebrahimi   masm_->GetScratchVRegisterList()->Combine(reg);
157*f5c631daSSadaf Ebrahimi }
158*f5c631daSSadaf Ebrahimi 
159*f5c631daSSadaf Ebrahimi 
Include(const RegisterList & list)160*f5c631daSSadaf Ebrahimi void UseScratchRegisterScope::Include(const RegisterList& list) {
161*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
162*f5c631daSSadaf Ebrahimi   RegisterList excluded_registers(sp, lr, pc);
163*f5c631daSSadaf Ebrahimi   uint32_t mask = list.GetList() & ~excluded_registers.GetList();
164*f5c631daSSadaf Ebrahimi   RegisterList* available = masm_->GetScratchRegisterList();
165*f5c631daSSadaf Ebrahimi   available->SetList(available->GetList() | mask);
166*f5c631daSSadaf Ebrahimi }
167*f5c631daSSadaf Ebrahimi 
168*f5c631daSSadaf Ebrahimi 
Include(const VRegisterList & list)169*f5c631daSSadaf Ebrahimi void UseScratchRegisterScope::Include(const VRegisterList& list) {
170*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
171*f5c631daSSadaf Ebrahimi   VRegisterList* available = masm_->GetScratchVRegisterList();
172*f5c631daSSadaf Ebrahimi   available->SetList(available->GetList() | list.GetList());
173*f5c631daSSadaf Ebrahimi }
174*f5c631daSSadaf Ebrahimi 
175*f5c631daSSadaf Ebrahimi 
Exclude(const RegisterList & list)176*f5c631daSSadaf Ebrahimi void UseScratchRegisterScope::Exclude(const RegisterList& list) {
177*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
178*f5c631daSSadaf Ebrahimi   RegisterList* available = masm_->GetScratchRegisterList();
179*f5c631daSSadaf Ebrahimi   available->SetList(available->GetList() & ~list.GetList());
180*f5c631daSSadaf Ebrahimi }
181*f5c631daSSadaf Ebrahimi 
182*f5c631daSSadaf Ebrahimi 
Exclude(const VRegisterList & list)183*f5c631daSSadaf Ebrahimi void UseScratchRegisterScope::Exclude(const VRegisterList& list) {
184*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
185*f5c631daSSadaf Ebrahimi   VRegisterList* available = masm_->GetScratchVRegisterList();
186*f5c631daSSadaf Ebrahimi   available->SetList(available->GetList() & ~list.GetList());
187*f5c631daSSadaf Ebrahimi }
188*f5c631daSSadaf Ebrahimi 
189*f5c631daSSadaf Ebrahimi 
Exclude(const Operand & operand)190*f5c631daSSadaf Ebrahimi void UseScratchRegisterScope::Exclude(const Operand& operand) {
191*f5c631daSSadaf Ebrahimi   if (operand.IsImmediateShiftedRegister()) {
192*f5c631daSSadaf Ebrahimi     Exclude(operand.GetBaseRegister());
193*f5c631daSSadaf Ebrahimi   } else if (operand.IsRegisterShiftedRegister()) {
194*f5c631daSSadaf Ebrahimi     Exclude(operand.GetBaseRegister(), operand.GetShiftRegister());
195*f5c631daSSadaf Ebrahimi   } else {
196*f5c631daSSadaf Ebrahimi     VIXL_ASSERT(operand.IsImmediate());
197*f5c631daSSadaf Ebrahimi   }
198*f5c631daSSadaf Ebrahimi }
199*f5c631daSSadaf Ebrahimi 
200*f5c631daSSadaf Ebrahimi 
ExcludeAll()201*f5c631daSSadaf Ebrahimi void UseScratchRegisterScope::ExcludeAll() {
202*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(masm_ != NULL);
203*f5c631daSSadaf Ebrahimi   masm_->GetScratchRegisterList()->SetList(0);
204*f5c631daSSadaf Ebrahimi   masm_->GetScratchVRegisterList()->SetList(0);
205*f5c631daSSadaf Ebrahimi }
206*f5c631daSSadaf Ebrahimi 
207*f5c631daSSadaf Ebrahimi 
EnsureEmitPoolsFor(size_t size_arg)208*f5c631daSSadaf Ebrahimi void MacroAssembler::EnsureEmitPoolsFor(size_t size_arg) {
209*f5c631daSSadaf Ebrahimi   // We skip the check when the pools are blocked.
210*f5c631daSSadaf Ebrahimi   if (ArePoolsBlocked()) return;
211*f5c631daSSadaf Ebrahimi 
212*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsUint32(size_arg));
213*f5c631daSSadaf Ebrahimi   uint32_t size = static_cast<uint32_t>(size_arg);
214*f5c631daSSadaf Ebrahimi 
215*f5c631daSSadaf Ebrahimi   if (pool_manager_.MustEmit(GetCursorOffset(), size)) {
216*f5c631daSSadaf Ebrahimi     int32_t new_pc = pool_manager_.Emit(this, GetCursorOffset(), size);
217*f5c631daSSadaf Ebrahimi     VIXL_ASSERT(new_pc == GetCursorOffset());
218*f5c631daSSadaf Ebrahimi     USE(new_pc);
219*f5c631daSSadaf Ebrahimi   }
220*f5c631daSSadaf Ebrahimi }
221*f5c631daSSadaf Ebrahimi 
222*f5c631daSSadaf Ebrahimi 
HandleOutOfBoundsImmediate(Condition cond,Register tmp,uint32_t imm)223*f5c631daSSadaf Ebrahimi void MacroAssembler::HandleOutOfBoundsImmediate(Condition cond,
224*f5c631daSSadaf Ebrahimi                                                 Register tmp,
225*f5c631daSSadaf Ebrahimi                                                 uint32_t imm) {
226*f5c631daSSadaf Ebrahimi   if (IsUintN(16, imm)) {
227*f5c631daSSadaf Ebrahimi     CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
228*f5c631daSSadaf Ebrahimi     mov(cond, tmp, imm & 0xffff);
229*f5c631daSSadaf Ebrahimi     return;
230*f5c631daSSadaf Ebrahimi   }
231*f5c631daSSadaf Ebrahimi   if (IsUsingT32()) {
232*f5c631daSSadaf Ebrahimi     if (ImmediateT32::IsImmediateT32(~imm)) {
233*f5c631daSSadaf Ebrahimi       CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
234*f5c631daSSadaf Ebrahimi       mvn(cond, tmp, ~imm);
235*f5c631daSSadaf Ebrahimi       return;
236*f5c631daSSadaf Ebrahimi     }
237*f5c631daSSadaf Ebrahimi   } else {
238*f5c631daSSadaf Ebrahimi     if (ImmediateA32::IsImmediateA32(~imm)) {
239*f5c631daSSadaf Ebrahimi       CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
240*f5c631daSSadaf Ebrahimi       mvn(cond, tmp, ~imm);
241*f5c631daSSadaf Ebrahimi       return;
242*f5c631daSSadaf Ebrahimi     }
243*f5c631daSSadaf Ebrahimi   }
244*f5c631daSSadaf Ebrahimi   CodeBufferCheckScope scope(this, 2 * kMaxInstructionSizeInBytes);
245*f5c631daSSadaf Ebrahimi   mov(cond, tmp, imm & 0xffff);
246*f5c631daSSadaf Ebrahimi   movt(cond, tmp, imm >> 16);
247*f5c631daSSadaf Ebrahimi }
248*f5c631daSSadaf Ebrahimi 
249*f5c631daSSadaf Ebrahimi 
// Build a MemOperand for (base + offset) when `offset` does not fit in the
// instruction's immediate field. Bits covered by `extra_offset_mask` may stay
// in the returned MemOperand's offset; the remainder is added into `scratch`
// (which becomes the returned base). May emit SUB/ADD instructions.
MemOperand MacroAssembler::MemOperandComputationHelper(
    Condition cond,
    Register scratch,
    Register base,
    uint32_t offset,
    uint32_t extra_offset_mask) {
  VIXL_ASSERT(!AliasesAvailableScratchRegister(scratch));
  VIXL_ASSERT(!AliasesAvailableScratchRegister(base));
  VIXL_ASSERT(allow_macro_instructions_);
  VIXL_ASSERT(OutsideITBlock());

  // Check for the simple pass-through case: the whole offset is encodable, so
  // no instructions are needed.
  if ((offset & extra_offset_mask) == offset) return MemOperand(base, offset);

  MacroEmissionCheckScope guard(this);
  ITScope it_scope(this, &cond, guard);

  // Split the offset: low bits stay in the load/store immediate, high bits go
  // through an ADD into `scratch`.
  uint32_t load_store_offset = offset & extra_offset_mask;
  uint32_t add_offset = offset & ~extra_offset_mask;
  // If the full offset (or its negation) is itself a modified immediate, let
  // the ADD absorb all of it so the final MemOperand needs no immediate.
  if ((add_offset != 0) &&
      (IsModifiedImmediate(offset) || IsModifiedImmediate(-offset))) {
    load_store_offset = 0;
    add_offset = offset;
  }

  if (base.IsPC()) {
    // Special handling for PC bases. We must read the PC in the first
    // instruction (and only in that instruction), and we must also take care to
    // keep the same address calculation as loads and stores. For T32, that
    // means using something like ADR, which uses AlignDown(PC, 4).

    // We don't handle positive offsets from PC because the intention is not
    // clear; does the user expect the offset from the current
    // GetCursorOffset(), or to allow a certain amount of space after the
    // instruction?
    VIXL_ASSERT((offset & 0x80000000) != 0);
    if (IsUsingT32()) {
      // T32: make the first instruction "SUB (immediate, from PC)" -- an alias
      // of ADR -- to get behaviour like loads and stores. This ADR can handle
      // at least as much offset as the load_store_offset so it can replace it.

      // Recompute the split relative to the ADR-reachable portion.
      uint32_t sub_pc_offset = (-offset) & 0xfff;
      load_store_offset = (offset + sub_pc_offset) & extra_offset_mask;
      add_offset = (offset + sub_pc_offset) & ~extra_offset_mask;

      // Exactly one 32-bit instruction: the PC must be read here and only here.
      ExactAssemblyScope scope(this, k32BitT32InstructionSizeInBytes);
      sub(cond, scratch, base, sub_pc_offset);

      if (add_offset == 0) return MemOperand(scratch, load_store_offset);

      // The rest of the offset can be generated in the usual way.
      base = scratch;
    }
    // A32 can use any SUB instruction, so we don't have to do anything special
    // here except to ensure that we read the PC first.
  }

  add(cond, scratch, base, add_offset);
  return MemOperand(scratch, load_store_offset);
}
310*f5c631daSSadaf Ebrahimi 
311*f5c631daSSadaf Ebrahimi 
GetOffsetMask(InstructionType type,AddrMode addrmode)312*f5c631daSSadaf Ebrahimi uint32_t MacroAssembler::GetOffsetMask(InstructionType type,
313*f5c631daSSadaf Ebrahimi                                        AddrMode addrmode) {
314*f5c631daSSadaf Ebrahimi   switch (type) {
315*f5c631daSSadaf Ebrahimi     case kLdr:
316*f5c631daSSadaf Ebrahimi     case kLdrb:
317*f5c631daSSadaf Ebrahimi     case kStr:
318*f5c631daSSadaf Ebrahimi     case kStrb:
319*f5c631daSSadaf Ebrahimi       if (IsUsingA32() || (addrmode == Offset)) {
320*f5c631daSSadaf Ebrahimi         return 0xfff;
321*f5c631daSSadaf Ebrahimi       } else {
322*f5c631daSSadaf Ebrahimi         return 0xff;
323*f5c631daSSadaf Ebrahimi       }
324*f5c631daSSadaf Ebrahimi     case kLdrsb:
325*f5c631daSSadaf Ebrahimi     case kLdrh:
326*f5c631daSSadaf Ebrahimi     case kLdrsh:
327*f5c631daSSadaf Ebrahimi     case kStrh:
328*f5c631daSSadaf Ebrahimi       if (IsUsingT32() && (addrmode == Offset)) {
329*f5c631daSSadaf Ebrahimi         return 0xfff;
330*f5c631daSSadaf Ebrahimi       } else {
331*f5c631daSSadaf Ebrahimi         return 0xff;
332*f5c631daSSadaf Ebrahimi       }
333*f5c631daSSadaf Ebrahimi     case kVldr:
334*f5c631daSSadaf Ebrahimi     case kVstr:
335*f5c631daSSadaf Ebrahimi       return 0x3fc;
336*f5c631daSSadaf Ebrahimi     case kLdrd:
337*f5c631daSSadaf Ebrahimi     case kStrd:
338*f5c631daSSadaf Ebrahimi       if (IsUsingA32()) {
339*f5c631daSSadaf Ebrahimi         return 0xff;
340*f5c631daSSadaf Ebrahimi       } else {
341*f5c631daSSadaf Ebrahimi         return 0x3fc;
342*f5c631daSSadaf Ebrahimi       }
343*f5c631daSSadaf Ebrahimi     default:
344*f5c631daSSadaf Ebrahimi       VIXL_UNREACHABLE();
345*f5c631daSSadaf Ebrahimi       return 0;
346*f5c631daSSadaf Ebrahimi   }
347*f5c631daSSadaf Ebrahimi }
348*f5c631daSSadaf Ebrahimi 
349*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineRRRR(
    const char* format, uint32_t a, uint32_t b, uint32_t c, uint32_t d) {
  printf(format, a, b, c, d);
}
354*f5c631daSSadaf Ebrahimi 
355*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineRRRD(
    const char* format, uint32_t a, uint32_t b, uint32_t c, double d) {
  printf(format, a, b, c, d);
}
360*f5c631daSSadaf Ebrahimi 
361*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineRRDR(
    const char* format, uint32_t a, uint32_t b, double c, uint32_t d) {
  printf(format, a, b, c, d);
}
366*f5c631daSSadaf Ebrahimi 
367*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineRRDD(
    const char* format, uint32_t a, uint32_t b, double c, double d) {
  printf(format, a, b, c, d);
}
372*f5c631daSSadaf Ebrahimi 
373*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineRDRR(
    const char* format, uint32_t a, double b, uint32_t c, uint32_t d) {
  printf(format, a, b, c, d);
}
378*f5c631daSSadaf Ebrahimi 
379*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineRDRD(
    const char* format, uint32_t a, double b, uint32_t c, double d) {
  printf(format, a, b, c, d);
}
384*f5c631daSSadaf Ebrahimi 
385*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineRDDR(
    const char* format, uint32_t a, double b, double c, uint32_t d) {
  printf(format, a, b, c, d);
}
390*f5c631daSSadaf Ebrahimi 
391*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineRDDD(
    const char* format, uint32_t a, double b, double c, double d) {
  printf(format, a, b, c, d);
}
396*f5c631daSSadaf Ebrahimi 
397*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineDRRR(
    const char* format, double a, uint32_t b, uint32_t c, uint32_t d) {
  printf(format, a, b, c, d);
}
402*f5c631daSSadaf Ebrahimi 
403*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineDRRD(
    const char* format, double a, uint32_t b, uint32_t c, double d) {
  printf(format, a, b, c, d);
}
408*f5c631daSSadaf Ebrahimi 
409*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineDRDR(
    const char* format, double a, uint32_t b, double c, uint32_t d) {
  printf(format, a, b, c, d);
}
414*f5c631daSSadaf Ebrahimi 
415*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineDRDD(
    const char* format, double a, uint32_t b, double c, double d) {
  printf(format, a, b, c, d);
}
420*f5c631daSSadaf Ebrahimi 
421*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineDDRR(
    const char* format, double a, double b, uint32_t c, uint32_t d) {
  printf(format, a, b, c, d);
}
426*f5c631daSSadaf Ebrahimi 
427*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineDDRD(
    const char* format, double a, double b, uint32_t c, double d) {
  printf(format, a, b, c, d);
}
432*f5c631daSSadaf Ebrahimi 
433*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineDDDR(
    const char* format, double a, double b, double c, uint32_t d) {
  printf(format, a, b, c, d);
}
438*f5c631daSSadaf Ebrahimi 
439*f5c631daSSadaf Ebrahimi 
// Non-variadic printf wrapper for the argument pattern named by its suffix
// (R = uint32_t, D = double), giving generated code a fixed call signature.
HARDFLOAT void PrintfTrampolineDDDD(
    const char* format, double a, double b, double c, double d) {
  printf(format, a, b, c, d);
}
444*f5c631daSSadaf Ebrahimi 
445*f5c631daSSadaf Ebrahimi 
Printf(const char * format,CPURegister reg1,CPURegister reg2,CPURegister reg3,CPURegister reg4)446*f5c631daSSadaf Ebrahimi void MacroAssembler::Printf(const char* format,
447*f5c631daSSadaf Ebrahimi                             CPURegister reg1,
448*f5c631daSSadaf Ebrahimi                             CPURegister reg2,
449*f5c631daSSadaf Ebrahimi                             CPURegister reg3,
450*f5c631daSSadaf Ebrahimi                             CPURegister reg4) {
451*f5c631daSSadaf Ebrahimi   // Exclude all registers from the available scratch registers, so
452*f5c631daSSadaf Ebrahimi   // that we are able to use ip below.
453*f5c631daSSadaf Ebrahimi   // TODO: Refactor this function to use UseScratchRegisterScope
454*f5c631daSSadaf Ebrahimi   // for temporary registers below.
455*f5c631daSSadaf Ebrahimi   UseScratchRegisterScope scratch(this);
456*f5c631daSSadaf Ebrahimi   scratch.ExcludeAll();
457*f5c631daSSadaf Ebrahimi   if (generate_simulator_code_) {
458*f5c631daSSadaf Ebrahimi     PushRegister(reg4);
459*f5c631daSSadaf Ebrahimi     PushRegister(reg3);
460*f5c631daSSadaf Ebrahimi     PushRegister(reg2);
461*f5c631daSSadaf Ebrahimi     PushRegister(reg1);
462*f5c631daSSadaf Ebrahimi     Push(RegisterList(r0, r1));
463*f5c631daSSadaf Ebrahimi     StringLiteral* format_literal =
464*f5c631daSSadaf Ebrahimi         new StringLiteral(format, RawLiteral::kDeletedOnPlacementByPool);
465*f5c631daSSadaf Ebrahimi     Adr(r0, format_literal);
466*f5c631daSSadaf Ebrahimi     uint32_t args = (reg4.GetType() << 12) | (reg3.GetType() << 8) |
467*f5c631daSSadaf Ebrahimi                     (reg2.GetType() << 4) | reg1.GetType();
468*f5c631daSSadaf Ebrahimi     Mov(r1, args);
469*f5c631daSSadaf Ebrahimi     Hvc(kPrintfCode);
470*f5c631daSSadaf Ebrahimi     Pop(RegisterList(r0, r1));
471*f5c631daSSadaf Ebrahimi     int size = reg4.GetRegSizeInBytes() + reg3.GetRegSizeInBytes() +
472*f5c631daSSadaf Ebrahimi                reg2.GetRegSizeInBytes() + reg1.GetRegSizeInBytes();
473*f5c631daSSadaf Ebrahimi     Drop(size);
474*f5c631daSSadaf Ebrahimi   } else {
475*f5c631daSSadaf Ebrahimi     // Generate on a native platform => 32 bit environment.
476*f5c631daSSadaf Ebrahimi     // Preserve core registers r0-r3, r12, r14
477*f5c631daSSadaf Ebrahimi     const uint32_t saved_registers_mask =
478*f5c631daSSadaf Ebrahimi         kCallerSavedRegistersMask | (1 << r5.GetCode());
479*f5c631daSSadaf Ebrahimi     Push(RegisterList(saved_registers_mask));
480*f5c631daSSadaf Ebrahimi     // Push VFP registers.
481*f5c631daSSadaf Ebrahimi     Vpush(Untyped64, DRegisterList(d0, 8));
482*f5c631daSSadaf Ebrahimi     if (Has32DRegs()) Vpush(Untyped64, DRegisterList(d16, 16));
483*f5c631daSSadaf Ebrahimi     // Search one register which has been saved and which doesn't need to be
484*f5c631daSSadaf Ebrahimi     // printed.
485*f5c631daSSadaf Ebrahimi     RegisterList available_registers(kCallerSavedRegistersMask);
486*f5c631daSSadaf Ebrahimi     if (reg1.GetType() == CPURegister::kRRegister) {
487*f5c631daSSadaf Ebrahimi       available_registers.Remove(Register(reg1.GetCode()));
488*f5c631daSSadaf Ebrahimi     }
489*f5c631daSSadaf Ebrahimi     if (reg2.GetType() == CPURegister::kRRegister) {
490*f5c631daSSadaf Ebrahimi       available_registers.Remove(Register(reg2.GetCode()));
491*f5c631daSSadaf Ebrahimi     }
492*f5c631daSSadaf Ebrahimi     if (reg3.GetType() == CPURegister::kRRegister) {
493*f5c631daSSadaf Ebrahimi       available_registers.Remove(Register(reg3.GetCode()));
494*f5c631daSSadaf Ebrahimi     }
495*f5c631daSSadaf Ebrahimi     if (reg4.GetType() == CPURegister::kRRegister) {
496*f5c631daSSadaf Ebrahimi       available_registers.Remove(Register(reg4.GetCode()));
497*f5c631daSSadaf Ebrahimi     }
498*f5c631daSSadaf Ebrahimi     Register tmp = available_registers.GetFirstAvailableRegister();
499*f5c631daSSadaf Ebrahimi     VIXL_ASSERT(tmp.GetType() == CPURegister::kRRegister);
500*f5c631daSSadaf Ebrahimi     // Push the flags.
501*f5c631daSSadaf Ebrahimi     Mrs(tmp, APSR);
502*f5c631daSSadaf Ebrahimi     Push(tmp);
503*f5c631daSSadaf Ebrahimi     Vmrs(RegisterOrAPSR_nzcv(tmp.GetCode()), FPSCR);
504*f5c631daSSadaf Ebrahimi     Push(tmp);
505*f5c631daSSadaf Ebrahimi     // Push the registers to print on the stack.
506*f5c631daSSadaf Ebrahimi     PushRegister(reg4);
507*f5c631daSSadaf Ebrahimi     PushRegister(reg3);
508*f5c631daSSadaf Ebrahimi     PushRegister(reg2);
509*f5c631daSSadaf Ebrahimi     PushRegister(reg1);
510*f5c631daSSadaf Ebrahimi     int core_count = 1;
511*f5c631daSSadaf Ebrahimi     int vfp_count = 0;
512*f5c631daSSadaf Ebrahimi     uint32_t printf_type = 0;
513*f5c631daSSadaf Ebrahimi     // Pop the registers to print and store them into r1-r3 and/or d0-d3.
    // Reg4 may stay on the stack if all the registers to print are core
    // registers.
516*f5c631daSSadaf Ebrahimi     PreparePrintfArgument(reg1, &core_count, &vfp_count, &printf_type);
517*f5c631daSSadaf Ebrahimi     PreparePrintfArgument(reg2, &core_count, &vfp_count, &printf_type);
518*f5c631daSSadaf Ebrahimi     PreparePrintfArgument(reg3, &core_count, &vfp_count, &printf_type);
519*f5c631daSSadaf Ebrahimi     PreparePrintfArgument(reg4, &core_count, &vfp_count, &printf_type);
520*f5c631daSSadaf Ebrahimi     // Ensure that the stack is aligned on 8 bytes.
521*f5c631daSSadaf Ebrahimi     And(r5, sp, 0x7);
522*f5c631daSSadaf Ebrahimi     if (core_count == 5) {
523*f5c631daSSadaf Ebrahimi       // One 32 bit argument (reg4) has been left on the stack =>  align the
524*f5c631daSSadaf Ebrahimi       // stack
525*f5c631daSSadaf Ebrahimi       // before the argument.
526*f5c631daSSadaf Ebrahimi       Pop(r0);
527*f5c631daSSadaf Ebrahimi       Sub(sp, sp, r5);
528*f5c631daSSadaf Ebrahimi       Push(r0);
529*f5c631daSSadaf Ebrahimi     } else {
530*f5c631daSSadaf Ebrahimi       Sub(sp, sp, r5);
531*f5c631daSSadaf Ebrahimi     }
532*f5c631daSSadaf Ebrahimi     // Select the right trampoline depending on the arguments.
533*f5c631daSSadaf Ebrahimi     uintptr_t address;
534*f5c631daSSadaf Ebrahimi     switch (printf_type) {
535*f5c631daSSadaf Ebrahimi       case 0:
536*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineRRRR);
537*f5c631daSSadaf Ebrahimi         break;
538*f5c631daSSadaf Ebrahimi       case 1:
539*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineDRRR);
540*f5c631daSSadaf Ebrahimi         break;
541*f5c631daSSadaf Ebrahimi       case 2:
542*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineRDRR);
543*f5c631daSSadaf Ebrahimi         break;
544*f5c631daSSadaf Ebrahimi       case 3:
545*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineDDRR);
546*f5c631daSSadaf Ebrahimi         break;
547*f5c631daSSadaf Ebrahimi       case 4:
548*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineRRDR);
549*f5c631daSSadaf Ebrahimi         break;
550*f5c631daSSadaf Ebrahimi       case 5:
551*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineDRDR);
552*f5c631daSSadaf Ebrahimi         break;
553*f5c631daSSadaf Ebrahimi       case 6:
554*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineRDDR);
555*f5c631daSSadaf Ebrahimi         break;
556*f5c631daSSadaf Ebrahimi       case 7:
557*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineDDDR);
558*f5c631daSSadaf Ebrahimi         break;
559*f5c631daSSadaf Ebrahimi       case 8:
560*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineRRRD);
561*f5c631daSSadaf Ebrahimi         break;
562*f5c631daSSadaf Ebrahimi       case 9:
563*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineDRRD);
564*f5c631daSSadaf Ebrahimi         break;
565*f5c631daSSadaf Ebrahimi       case 10:
566*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineRDRD);
567*f5c631daSSadaf Ebrahimi         break;
568*f5c631daSSadaf Ebrahimi       case 11:
569*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineDDRD);
570*f5c631daSSadaf Ebrahimi         break;
571*f5c631daSSadaf Ebrahimi       case 12:
572*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineRRDD);
573*f5c631daSSadaf Ebrahimi         break;
574*f5c631daSSadaf Ebrahimi       case 13:
575*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineDRDD);
576*f5c631daSSadaf Ebrahimi         break;
577*f5c631daSSadaf Ebrahimi       case 14:
578*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineRDDD);
579*f5c631daSSadaf Ebrahimi         break;
580*f5c631daSSadaf Ebrahimi       case 15:
581*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineDDDD);
582*f5c631daSSadaf Ebrahimi         break;
583*f5c631daSSadaf Ebrahimi       default:
584*f5c631daSSadaf Ebrahimi         VIXL_UNREACHABLE();
585*f5c631daSSadaf Ebrahimi         address = reinterpret_cast<uintptr_t>(PrintfTrampolineRRRR);
586*f5c631daSSadaf Ebrahimi         break;
587*f5c631daSSadaf Ebrahimi     }
588*f5c631daSSadaf Ebrahimi     StringLiteral* format_literal =
589*f5c631daSSadaf Ebrahimi         new StringLiteral(format, RawLiteral::kDeletedOnPlacementByPool);
590*f5c631daSSadaf Ebrahimi     Adr(r0, format_literal);
591*f5c631daSSadaf Ebrahimi     Mov(ip, Operand::From(address));
592*f5c631daSSadaf Ebrahimi     Blx(ip);
593*f5c631daSSadaf Ebrahimi     // If register reg4 was left on the stack => skip it.
594*f5c631daSSadaf Ebrahimi     if (core_count == 5) Drop(kRegSizeInBytes);
595*f5c631daSSadaf Ebrahimi     // Restore the stack as it was before alignment.
596*f5c631daSSadaf Ebrahimi     Add(sp, sp, r5);
597*f5c631daSSadaf Ebrahimi     // Restore the flags.
598*f5c631daSSadaf Ebrahimi     Pop(tmp);
599*f5c631daSSadaf Ebrahimi     Vmsr(FPSCR, tmp);
600*f5c631daSSadaf Ebrahimi     Pop(tmp);
601*f5c631daSSadaf Ebrahimi     Msr(APSR_nzcvqg, tmp);
    // Restore the registers.
603*f5c631daSSadaf Ebrahimi     if (Has32DRegs()) Vpop(Untyped64, DRegisterList(d16, 16));
604*f5c631daSSadaf Ebrahimi     Vpop(Untyped64, DRegisterList(d0, 8));
605*f5c631daSSadaf Ebrahimi     Pop(RegisterList(saved_registers_mask));
606*f5c631daSSadaf Ebrahimi   }
607*f5c631daSSadaf Ebrahimi }
608*f5c631daSSadaf Ebrahimi 
609*f5c631daSSadaf Ebrahimi 
PushRegister(CPURegister reg)610*f5c631daSSadaf Ebrahimi void MacroAssembler::PushRegister(CPURegister reg) {
611*f5c631daSSadaf Ebrahimi   switch (reg.GetType()) {
612*f5c631daSSadaf Ebrahimi     case CPURegister::kNoRegister:
613*f5c631daSSadaf Ebrahimi       break;
614*f5c631daSSadaf Ebrahimi     case CPURegister::kRRegister:
615*f5c631daSSadaf Ebrahimi       Push(Register(reg.GetCode()));
616*f5c631daSSadaf Ebrahimi       break;
617*f5c631daSSadaf Ebrahimi     case CPURegister::kSRegister:
618*f5c631daSSadaf Ebrahimi       Vpush(Untyped32, SRegisterList(SRegister(reg.GetCode())));
619*f5c631daSSadaf Ebrahimi       break;
620*f5c631daSSadaf Ebrahimi     case CPURegister::kDRegister:
621*f5c631daSSadaf Ebrahimi       Vpush(Untyped64, DRegisterList(DRegister(reg.GetCode())));
622*f5c631daSSadaf Ebrahimi       break;
623*f5c631daSSadaf Ebrahimi     case CPURegister::kQRegister:
624*f5c631daSSadaf Ebrahimi       VIXL_UNIMPLEMENTED();
625*f5c631daSSadaf Ebrahimi       break;
626*f5c631daSSadaf Ebrahimi   }
627*f5c631daSSadaf Ebrahimi }
628*f5c631daSSadaf Ebrahimi 
629*f5c631daSSadaf Ebrahimi 
// Pops one Printf argument from the stack (where Printf pushed it) and moves
// it into the calling-convention position used by the printf trampolines:
// core values go into the next free core register, floating-point values into
// the next free D register.
//
// *core_count starts at 1 in Printf (r0 is reserved for the format string)
// and tracks the next free core register; *vfp_count tracks the next free
// D register. Each floating-point argument sets a bit in *printf_type, which
// Printf later uses to select the matching PrintfTrampoline variant.
void MacroAssembler::PreparePrintfArgument(CPURegister reg,
                                           int* core_count,
                                           int* vfp_count,
                                           uint32_t* printf_type) {
  switch (reg.GetType()) {
    case CPURegister::kNoRegister:
      // Unused argument slot: nothing was pushed, nothing to do.
      break;
    case CPURegister::kRRegister:
      VIXL_ASSERT(*core_count <= 4);
      // When this is the 4th core argument, it is deliberately left on the
      // stack; Printf drops it after the trampoline call.
      if (*core_count < 4) Pop(Register(*core_count));
      *core_count += 1;
      break;
    case CPURegister::kSRegister:
      VIXL_ASSERT(*vfp_count < 4);
      // Mark this argument position as floating-point in the trampoline
      // selector. (*core_count + *vfp_count - 1) is the 0-based position of
      // the argument (core_count starts at 1).
      *printf_type |= 1 << (*core_count + *vfp_count - 1);
      // Pop the single-precision value into the low half of the target D
      // register, then widen it to double (C varargs promote float to
      // double).
      Vpop(Untyped32, SRegisterList(SRegister(*vfp_count * 2)));
      Vcvt(F64, F32, DRegister(*vfp_count), SRegister(*vfp_count * 2));
      *vfp_count += 1;
      break;
    case CPURegister::kDRegister:
      VIXL_ASSERT(*vfp_count < 4);
      // Same trampoline-selector bookkeeping as the S-register case; no
      // conversion needed for a double.
      *printf_type |= 1 << (*core_count + *vfp_count - 1);
      Vpop(Untyped64, DRegisterList(DRegister(*vfp_count)));
      *vfp_count += 1;
      break;
    case CPURegister::kQRegister:
      // Printing Q registers is not supported.
      VIXL_UNIMPLEMENTED();
      break;
  }
}
660*f5c631daSSadaf Ebrahimi 
661*f5c631daSSadaf Ebrahimi 
Delegate(InstructionType type,InstructionCondROp instruction,Condition cond,Register rn,const Operand & operand)662*f5c631daSSadaf Ebrahimi void MacroAssembler::Delegate(InstructionType type,
663*f5c631daSSadaf Ebrahimi                               InstructionCondROp instruction,
664*f5c631daSSadaf Ebrahimi                               Condition cond,
665*f5c631daSSadaf Ebrahimi                               Register rn,
666*f5c631daSSadaf Ebrahimi                               const Operand& operand) {
667*f5c631daSSadaf Ebrahimi   VIXL_ASSERT((type == kMovt) || (type == kSxtb16) || (type == kTeq) ||
668*f5c631daSSadaf Ebrahimi               (type == kUxtb16));
669*f5c631daSSadaf Ebrahimi 
670*f5c631daSSadaf Ebrahimi   if (type == kMovt) {
671*f5c631daSSadaf Ebrahimi     VIXL_ABORT_WITH_MSG("`Movt` expects a 16-bit immediate.\n");
672*f5c631daSSadaf Ebrahimi   }
673*f5c631daSSadaf Ebrahimi 
674*f5c631daSSadaf Ebrahimi   // This delegate only supports teq with immediates.
675*f5c631daSSadaf Ebrahimi   CONTEXT_SCOPE;
676*f5c631daSSadaf Ebrahimi   if ((type == kTeq) && operand.IsImmediate()) {
677*f5c631daSSadaf Ebrahimi     UseScratchRegisterScope temps(this);
678*f5c631daSSadaf Ebrahimi     Register scratch = temps.Acquire();
679*f5c631daSSadaf Ebrahimi     HandleOutOfBoundsImmediate(cond, scratch, operand.GetImmediate());
680*f5c631daSSadaf Ebrahimi     CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
681*f5c631daSSadaf Ebrahimi     teq(cond, rn, scratch);
682*f5c631daSSadaf Ebrahimi     return;
683*f5c631daSSadaf Ebrahimi   }
684*f5c631daSSadaf Ebrahimi   Assembler::Delegate(type, instruction, cond, rn, operand);
685*f5c631daSSadaf Ebrahimi }
686*f5c631daSSadaf Ebrahimi 
687*f5c631daSSadaf Ebrahimi 
Delegate(InstructionType type,InstructionCondSizeROp instruction,Condition cond,EncodingSize size,Register rn,const Operand & operand)688*f5c631daSSadaf Ebrahimi void MacroAssembler::Delegate(InstructionType type,
689*f5c631daSSadaf Ebrahimi                               InstructionCondSizeROp instruction,
690*f5c631daSSadaf Ebrahimi                               Condition cond,
691*f5c631daSSadaf Ebrahimi                               EncodingSize size,
692*f5c631daSSadaf Ebrahimi                               Register rn,
693*f5c631daSSadaf Ebrahimi                               const Operand& operand) {
694*f5c631daSSadaf Ebrahimi   CONTEXT_SCOPE;
695*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(size.IsBest());
696*f5c631daSSadaf Ebrahimi   VIXL_ASSERT((type == kCmn) || (type == kCmp) || (type == kMov) ||
697*f5c631daSSadaf Ebrahimi               (type == kMovs) || (type == kMvn) || (type == kMvns) ||
698*f5c631daSSadaf Ebrahimi               (type == kSxtb) || (type == kSxth) || (type == kTst) ||
699*f5c631daSSadaf Ebrahimi               (type == kUxtb) || (type == kUxth));
700*f5c631daSSadaf Ebrahimi   if (IsUsingT32() && operand.IsRegisterShiftedRegister()) {
701*f5c631daSSadaf Ebrahimi     VIXL_ASSERT((type != kMov) || (type != kMovs));
702*f5c631daSSadaf Ebrahimi     InstructionCondRROp shiftop = NULL;
703*f5c631daSSadaf Ebrahimi     switch (operand.GetShift().GetType()) {
704*f5c631daSSadaf Ebrahimi       case LSL:
705*f5c631daSSadaf Ebrahimi         shiftop = &Assembler::lsl;
706*f5c631daSSadaf Ebrahimi         break;
707*f5c631daSSadaf Ebrahimi       case LSR:
708*f5c631daSSadaf Ebrahimi         shiftop = &Assembler::lsr;
709*f5c631daSSadaf Ebrahimi         break;
710*f5c631daSSadaf Ebrahimi       case ASR:
711*f5c631daSSadaf Ebrahimi         shiftop = &Assembler::asr;
712*f5c631daSSadaf Ebrahimi         break;
713*f5c631daSSadaf Ebrahimi       case RRX:
714*f5c631daSSadaf Ebrahimi         // A RegisterShiftedRegister operand cannot have a shift of type RRX.
715*f5c631daSSadaf Ebrahimi         VIXL_UNREACHABLE();
716*f5c631daSSadaf Ebrahimi         break;
717*f5c631daSSadaf Ebrahimi       case ROR:
718*f5c631daSSadaf Ebrahimi         shiftop = &Assembler::ror;
719*f5c631daSSadaf Ebrahimi         break;
720*f5c631daSSadaf Ebrahimi       default:
721*f5c631daSSadaf Ebrahimi         VIXL_UNREACHABLE();
722*f5c631daSSadaf Ebrahimi     }
723*f5c631daSSadaf Ebrahimi     if (shiftop != NULL) {
724*f5c631daSSadaf Ebrahimi       UseScratchRegisterScope temps(this);
725*f5c631daSSadaf Ebrahimi       Register scratch = temps.Acquire();
726*f5c631daSSadaf Ebrahimi       CodeBufferCheckScope scope(this, 2 * kMaxInstructionSizeInBytes);
727*f5c631daSSadaf Ebrahimi       (this->*shiftop)(cond,
728*f5c631daSSadaf Ebrahimi                        scratch,
729*f5c631daSSadaf Ebrahimi                        operand.GetBaseRegister(),
730*f5c631daSSadaf Ebrahimi                        operand.GetShiftRegister());
731*f5c631daSSadaf Ebrahimi       (this->*instruction)(cond, size, rn, scratch);
732*f5c631daSSadaf Ebrahimi       return;
733*f5c631daSSadaf Ebrahimi     }
734*f5c631daSSadaf Ebrahimi   }
735*f5c631daSSadaf Ebrahimi   if (operand.IsImmediate()) {
736*f5c631daSSadaf Ebrahimi     uint32_t imm = operand.GetImmediate();
737*f5c631daSSadaf Ebrahimi     switch (type) {
738*f5c631daSSadaf Ebrahimi       case kMov:
739*f5c631daSSadaf Ebrahimi       case kMovs:
740*f5c631daSSadaf Ebrahimi         if (!rn.IsPC()) {
741*f5c631daSSadaf Ebrahimi           // Immediate is too large, but not using PC, so handle with mov{t}.
742*f5c631daSSadaf Ebrahimi           HandleOutOfBoundsImmediate(cond, rn, imm);
743*f5c631daSSadaf Ebrahimi           if (type == kMovs) {
744*f5c631daSSadaf Ebrahimi             CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
745*f5c631daSSadaf Ebrahimi             tst(cond, rn, rn);
746*f5c631daSSadaf Ebrahimi           }
747*f5c631daSSadaf Ebrahimi           return;
748*f5c631daSSadaf Ebrahimi         } else if (type == kMov) {
749*f5c631daSSadaf Ebrahimi           VIXL_ASSERT(IsUsingA32() || cond.Is(al));
750*f5c631daSSadaf Ebrahimi           // Immediate is too large and using PC, so handle using a temporary
751*f5c631daSSadaf Ebrahimi           // register.
752*f5c631daSSadaf Ebrahimi           UseScratchRegisterScope temps(this);
753*f5c631daSSadaf Ebrahimi           Register scratch = temps.Acquire();
754*f5c631daSSadaf Ebrahimi           HandleOutOfBoundsImmediate(al, scratch, imm);
755*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
756*f5c631daSSadaf Ebrahimi           bx(cond, scratch);
757*f5c631daSSadaf Ebrahimi           return;
758*f5c631daSSadaf Ebrahimi         }
759*f5c631daSSadaf Ebrahimi         break;
760*f5c631daSSadaf Ebrahimi       case kCmn:
761*f5c631daSSadaf Ebrahimi       case kCmp:
762*f5c631daSSadaf Ebrahimi         if (IsUsingA32() || !rn.IsPC()) {
763*f5c631daSSadaf Ebrahimi           UseScratchRegisterScope temps(this);
764*f5c631daSSadaf Ebrahimi           Register scratch = temps.Acquire();
765*f5c631daSSadaf Ebrahimi           HandleOutOfBoundsImmediate(cond, scratch, imm);
766*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
767*f5c631daSSadaf Ebrahimi           (this->*instruction)(cond, size, rn, scratch);
768*f5c631daSSadaf Ebrahimi           return;
769*f5c631daSSadaf Ebrahimi         }
770*f5c631daSSadaf Ebrahimi         break;
771*f5c631daSSadaf Ebrahimi       case kMvn:
772*f5c631daSSadaf Ebrahimi       case kMvns:
773*f5c631daSSadaf Ebrahimi         if (!rn.IsPC()) {
774*f5c631daSSadaf Ebrahimi           UseScratchRegisterScope temps(this);
775*f5c631daSSadaf Ebrahimi           Register scratch = temps.Acquire();
776*f5c631daSSadaf Ebrahimi           HandleOutOfBoundsImmediate(cond, scratch, imm);
777*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
778*f5c631daSSadaf Ebrahimi           (this->*instruction)(cond, size, rn, scratch);
779*f5c631daSSadaf Ebrahimi           return;
780*f5c631daSSadaf Ebrahimi         }
781*f5c631daSSadaf Ebrahimi         break;
782*f5c631daSSadaf Ebrahimi       case kTst:
783*f5c631daSSadaf Ebrahimi         if (IsUsingA32() || !rn.IsPC()) {
784*f5c631daSSadaf Ebrahimi           UseScratchRegisterScope temps(this);
785*f5c631daSSadaf Ebrahimi           Register scratch = temps.Acquire();
786*f5c631daSSadaf Ebrahimi           HandleOutOfBoundsImmediate(cond, scratch, imm);
787*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
788*f5c631daSSadaf Ebrahimi           (this->*instruction)(cond, size, rn, scratch);
789*f5c631daSSadaf Ebrahimi           return;
790*f5c631daSSadaf Ebrahimi         }
791*f5c631daSSadaf Ebrahimi         break;
792*f5c631daSSadaf Ebrahimi       default:  // kSxtb, Sxth, Uxtb, Uxth
793*f5c631daSSadaf Ebrahimi         break;
794*f5c631daSSadaf Ebrahimi     }
795*f5c631daSSadaf Ebrahimi   }
796*f5c631daSSadaf Ebrahimi   Assembler::Delegate(type, instruction, cond, size, rn, operand);
797*f5c631daSSadaf Ebrahimi }
798*f5c631daSSadaf Ebrahimi 
799*f5c631daSSadaf Ebrahimi 
// Delegate for two-register-plus-operand instructions that the assembler
// could not encode directly. Handles Orn, Orns, Rsc and Rscs; the extend/pack
// instructions listed below are explicitly unimplemented.
void MacroAssembler::Delegate(InstructionType type,
                              InstructionCondRROp instruction,
                              Condition cond,
                              Register rd,
                              Register rn,
                              const Operand& operand) {
  if ((type == kSxtab) || (type == kSxtab16) || (type == kSxtah) ||
      (type == kUxtab) || (type == kUxtab16) || (type == kUxtah) ||
      (type == kPkhbt) || (type == kPkhtb)) {
    UnimplementedDelegate(type);
    return;
  }

  // This delegate only handles the following instructions.
  VIXL_ASSERT((type == kOrn) || (type == kOrns) || (type == kRsc) ||
              (type == kRscs));
  CONTEXT_SCOPE;

  // T32 does not support register shifted register operands, emulate it:
  // shift into a scratch register first, then apply the instruction to the
  // shifted value.
  if (IsUsingT32() && operand.IsRegisterShiftedRegister()) {
    InstructionCondRROp shiftop = NULL;
    switch (operand.GetShift().GetType()) {
      case LSL:
        shiftop = &Assembler::lsl;
        break;
      case LSR:
        shiftop = &Assembler::lsr;
        break;
      case ASR:
        shiftop = &Assembler::asr;
        break;
      case RRX:
        // A RegisterShiftedRegister operand cannot have a shift of type RRX.
        VIXL_UNREACHABLE();
        break;
      case ROR:
        shiftop = &Assembler::ror;
        break;
      default:
        VIXL_UNREACHABLE();
    }
    if (shiftop != NULL) {
      UseScratchRegisterScope temps(this);
      Register rm = operand.GetBaseRegister();
      Register rs = operand.GetShiftRegister();
      // Try to use rd as a scratch register. We can do this if it aliases rs
      // or rm (because we read them in the first instruction), but not rn
      // (read by the second instruction).
      if (!rd.Is(rn)) temps.Include(rd);
      Register scratch = temps.Acquire();
      // TODO: The scope length was measured empirically. We should analyse
      // the worst-case size and add targeted tests.
      CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
      (this->*shiftop)(cond, scratch, rm, rs);
      (this->*instruction)(cond, rd, rn, scratch);
      return;
    }
  }

  // T32 does not have a Rsc instruction, negate the lhs input and turn it into
  // an Adc. Adc and Rsc are equivalent using a bitwise NOT:
  //   adc rd, rn, operand <-> rsc rd, NOT(rn), operand
  if (IsUsingT32() && ((type == kRsc) || (type == kRscs))) {
    // The RegisterShiftRegister case should have been handled above.
    VIXL_ASSERT(!operand.IsRegisterShiftedRegister());
    UseScratchRegisterScope temps(this);
    // Try to use rd as a scratch register. We can do this if it aliases rn
    // (because we read it in the first instruction), but not the registers
    // used by `operand` (read by the second instruction).
    temps.Include(rd);
    temps.Exclude(operand);
    Register negated_rn = temps.Acquire();
    {
      CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
      mvn(cond, negated_rn, rn);
    }
    if (type == kRsc) {
      CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
      adc(cond, rd, negated_rn, operand);
      return;
    }
    // TODO: We shouldn't have to specify how much space the next instruction
    // needs.
    CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
    adcs(cond, rd, negated_rn, operand);
    return;
  }

  if (operand.IsImmediate()) {
    // If the immediate can be encoded when inverted, turn Orn into Orr.
    // Otherwise rely on HandleOutOfBoundsImmediate to generate a series of
    // mov.
    int32_t imm = operand.GetSignedImmediate();
    if (((type == kOrn) || (type == kOrns)) && IsModifiedImmediate(~imm)) {
      CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
      switch (type) {
        case kOrn:
          orr(cond, rd, rn, ~imm);
          return;
        case kOrns:
          orrs(cond, rd, rn, ~imm);
          return;
        default:
          VIXL_UNREACHABLE();
          break;
      }
    }
  }

  // A32 does not have a Orn instruction, negate the rhs input and turn it into
  // a Orr:
  //  mvn scratch, operand
  //  orr rd, rn, scratch
  if (IsUsingA32() && ((type == kOrn) || (type == kOrns))) {
    // TODO: orn r0, r1, imm -> orr r0, r1, neg(imm) if doable
    Register scratch;
    UseScratchRegisterScope temps(this);
    // Try to use rd as a scratch register. We can do this if it aliases the
    // registers used by `operand` (because we read them in the first
    // instruction), but not rn (read by the second instruction).
    if (!rd.Is(rn)) temps.Include(rd);
    scratch = temps.Acquire();
    {
      // TODO: We shouldn't have to specify how much space the next instruction
      // needs.
      CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
      mvn(cond, scratch, operand);
    }
    if (type == kOrns) {
      CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
      orrs(cond, rd, rn, scratch);
      return;
    }
    CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
    orr(cond, rd, rn, scratch);
    return;
  }

  // Fallback for out-of-range immediates: materialise the immediate into a
  // scratch register and retry with a plain register operand.
  if (operand.IsImmediate()) {
    UseScratchRegisterScope temps(this);
    // Allow using the destination as a scratch register if possible.
    if (!rd.Is(rn)) temps.Include(rd);
    Register scratch = temps.Acquire();
    int32_t imm = operand.GetSignedImmediate();
    HandleOutOfBoundsImmediate(cond, scratch, imm);
    CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
    (this->*instruction)(cond, rd, rn, scratch);
    return;
  }
  Assembler::Delegate(type, instruction, cond, rd, rn, operand);
}
948*f5c631daSSadaf Ebrahimi 
949*f5c631daSSadaf Ebrahimi 
Delegate(InstructionType type,InstructionCondSizeRL instruction,Condition cond,EncodingSize size,Register rd,Location * location)950*f5c631daSSadaf Ebrahimi void MacroAssembler::Delegate(InstructionType type,
951*f5c631daSSadaf Ebrahimi                               InstructionCondSizeRL instruction,
952*f5c631daSSadaf Ebrahimi                               Condition cond,
953*f5c631daSSadaf Ebrahimi                               EncodingSize size,
954*f5c631daSSadaf Ebrahimi                               Register rd,
955*f5c631daSSadaf Ebrahimi                               Location* location) {
956*f5c631daSSadaf Ebrahimi   VIXL_ASSERT((type == kLdr) || (type == kAdr));
957*f5c631daSSadaf Ebrahimi 
958*f5c631daSSadaf Ebrahimi   CONTEXT_SCOPE;
959*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(size.IsBest());
960*f5c631daSSadaf Ebrahimi 
961*f5c631daSSadaf Ebrahimi   if ((type == kLdr) && location->IsBound()) {
962*f5c631daSSadaf Ebrahimi     CodeBufferCheckScope scope(this, 5 * kMaxInstructionSizeInBytes);
963*f5c631daSSadaf Ebrahimi     UseScratchRegisterScope temps(this);
964*f5c631daSSadaf Ebrahimi     temps.Include(rd);
965*f5c631daSSadaf Ebrahimi     uint32_t mask = GetOffsetMask(type, Offset);
966*f5c631daSSadaf Ebrahimi     ldr(rd, MemOperandComputationHelper(cond, temps.Acquire(), location, mask));
967*f5c631daSSadaf Ebrahimi     return;
968*f5c631daSSadaf Ebrahimi   }
969*f5c631daSSadaf Ebrahimi 
970*f5c631daSSadaf Ebrahimi   Assembler::Delegate(type, instruction, cond, size, rd, location);
971*f5c631daSSadaf Ebrahimi }
972*f5c631daSSadaf Ebrahimi 
973*f5c631daSSadaf Ebrahimi 
GenerateSplitInstruction(InstructionCondSizeRROp instruction,Condition cond,Register rd,Register rn,uint32_t imm,uint32_t mask)974*f5c631daSSadaf Ebrahimi bool MacroAssembler::GenerateSplitInstruction(
975*f5c631daSSadaf Ebrahimi     InstructionCondSizeRROp instruction,
976*f5c631daSSadaf Ebrahimi     Condition cond,
977*f5c631daSSadaf Ebrahimi     Register rd,
978*f5c631daSSadaf Ebrahimi     Register rn,
979*f5c631daSSadaf Ebrahimi     uint32_t imm,
980*f5c631daSSadaf Ebrahimi     uint32_t mask) {
981*f5c631daSSadaf Ebrahimi   uint32_t high = imm & ~mask;
982*f5c631daSSadaf Ebrahimi   if (!IsModifiedImmediate(high) && !rn.IsPC()) return false;
983*f5c631daSSadaf Ebrahimi   // If high is a modified immediate, we can perform the operation with
984*f5c631daSSadaf Ebrahimi   // only 2 instructions.
985*f5c631daSSadaf Ebrahimi   // Else, if rn is PC, we want to avoid moving PC into a temporary.
986*f5c631daSSadaf Ebrahimi   // Therefore, we also use the pattern even if the second call may
987*f5c631daSSadaf Ebrahimi   // generate 3 instructions.
988*f5c631daSSadaf Ebrahimi   uint32_t low = imm & mask;
989*f5c631daSSadaf Ebrahimi   CodeBufferCheckScope scope(this,
990*f5c631daSSadaf Ebrahimi                              (rn.IsPC() ? 4 : 2) * kMaxInstructionSizeInBytes);
991*f5c631daSSadaf Ebrahimi   (this->*instruction)(cond, Best, rd, rn, low);
992*f5c631daSSadaf Ebrahimi   (this->*instruction)(cond, Best, rd, rd, high);
993*f5c631daSSadaf Ebrahimi   return true;
994*f5c631daSSadaf Ebrahimi }
995*f5c631daSSadaf Ebrahimi 
996*f5c631daSSadaf Ebrahimi 
// Delegate for two-register data-processing instructions (add/sub/adc/sbc,
// logical and shift families, with or without flag setting) whose operand
// could not be encoded directly. Several rewrites are attempted in turn;
// if none applies, control falls through to Assembler::Delegate.
void MacroAssembler::Delegate(InstructionType type,
                              InstructionCondSizeRROp instruction,
                              Condition cond,
                              EncodingSize size,
                              Register rd,
                              Register rn,
                              const Operand& operand) {
  VIXL_ASSERT(
      (type == kAdc) || (type == kAdcs) || (type == kAdd) || (type == kAdds) ||
      (type == kAnd) || (type == kAnds) || (type == kAsr) || (type == kAsrs) ||
      (type == kBic) || (type == kBics) || (type == kEor) || (type == kEors) ||
      (type == kLsl) || (type == kLsls) || (type == kLsr) || (type == kLsrs) ||
      (type == kOrr) || (type == kOrrs) || (type == kRor) || (type == kRors) ||
      (type == kRsb) || (type == kRsbs) || (type == kSbc) || (type == kSbcs) ||
      (type == kSub) || (type == kSubs));

  CONTEXT_SCOPE;
  VIXL_ASSERT(size.IsBest());
  // T32 lacks register-shifted-register forms for these instructions, so
  // materialise the shifted value into a scratch register first, then use
  // the plain register form.
  if (IsUsingT32() && operand.IsRegisterShiftedRegister()) {
    InstructionCondRROp shiftop = NULL;
    switch (operand.GetShift().GetType()) {
      case LSL:
        shiftop = &Assembler::lsl;
        break;
      case LSR:
        shiftop = &Assembler::lsr;
        break;
      case ASR:
        shiftop = &Assembler::asr;
        break;
      case RRX:
        // A RegisterShiftedRegister operand cannot have a shift of type RRX.
        VIXL_UNREACHABLE();
        break;
      case ROR:
        shiftop = &Assembler::ror;
        break;
      default:
        VIXL_UNREACHABLE();
    }
    if (shiftop != NULL) {
      UseScratchRegisterScope temps(this);
      Register rm = operand.GetBaseRegister();
      Register rs = operand.GetShiftRegister();
      // Try to use rd as a scratch register. We can do this if it aliases rs or
      // rm (because we read them in the first instruction), but not rn.
      if (!rd.Is(rn)) temps.Include(rd);
      Register scratch = temps.Acquire();
      CodeBufferCheckScope scope(this, 2 * kMaxInstructionSizeInBytes);
      (this->*shiftop)(cond, scratch, rm, rs);
      (this->*instruction)(cond, size, rd, rn, scratch);
      return;
    }
  }
  if (operand.IsImmediate()) {
    int32_t imm = operand.GetSignedImmediate();
    // An orr/orrs whose immediate is unencodable may still be expressible
    // as orn/orns with the bitwise-inverted immediate (T32 only).
    if (ImmediateT32::IsImmediateT32(~imm)) {
      if (IsUsingT32()) {
        switch (type) {
          case kOrr:
            orn(cond, rd, rn, ~imm);
            return;
          case kOrrs:
            orns(cond, rd, rn, ~imm);
            return;
          default:
            break;
        }
      }
    }
    // Rewrite a negative immediate in terms of the complementary
    // instruction, whose (transformed) immediate may then be encodable.
    if (imm < 0) {
      InstructionCondSizeRROp asmcb = NULL;
      // Add and sub are equivalent using an arithmetic negation:
      //   add rd, rn, #imm <-> sub rd, rn, - #imm
      // Add and sub with carry are equivalent using a bitwise NOT:
      //   adc rd, rn, #imm <-> sbc rd, rn, NOT #imm
      switch (type) {
        case kAdd:
          asmcb = &Assembler::sub;
          imm = -imm;
          break;
        case kAdds:
          asmcb = &Assembler::subs;
          imm = -imm;
          break;
        case kSub:
          asmcb = &Assembler::add;
          imm = -imm;
          break;
        case kSubs:
          asmcb = &Assembler::adds;
          imm = -imm;
          break;
        case kAdc:
          asmcb = &Assembler::sbc;
          imm = ~imm;
          break;
        case kAdcs:
          asmcb = &Assembler::sbcs;
          imm = ~imm;
          break;
        case kSbc:
          asmcb = &Assembler::adc;
          imm = ~imm;
          break;
        case kSbcs:
          asmcb = &Assembler::adcs;
          imm = ~imm;
          break;
        default:
          break;
      }
      if (asmcb != NULL) {
        CodeBufferCheckScope scope(this, 4 * kMaxInstructionSizeInBytes);
        (this->*asmcb)(cond, size, rd, rn, Operand(imm));
        return;
      }
    }

    // When rn is PC, only handle negative offsets. The correct way to handle
    // positive offsets isn't clear; does the user want the offset from the
    // start of the macro, or from the end (to allow a certain amount of space)?
    // When type is Add or Sub, imm is always positive (imm < 0 has just been
    // handled and imm == 0 would have been generated without the need of a
    // delegate). Therefore, only add to PC is forbidden here.
    if ((((type == kAdd) && !rn.IsPC()) || (type == kSub)) &&
        (IsUsingA32() || (!rd.IsPC() && !rn.IsPC()))) {
      VIXL_ASSERT(imm > 0);
      // Try to break the constant into two modified immediates.
      // For T32 also try to break the constant into one imm12 and one modified
      // immediate. Count the trailing zeroes and get the biggest even value.
      int trailing_zeroes = CountTrailingZeros(imm) & ~1u;
      uint32_t mask = ((trailing_zeroes < 4) && IsUsingT32())
                          ? 0xfff
                          : (0xff << trailing_zeroes);
      if (GenerateSplitInstruction(instruction, cond, rd, rn, imm, mask)) {
        return;
      }
      // The direct split failed; try splitting the negated constant with
      // the complementary instruction instead.
      InstructionCondSizeRROp asmcb = NULL;
      switch (type) {
        case kAdd:
          asmcb = &Assembler::sub;
          break;
        case kSub:
          asmcb = &Assembler::add;
          break;
        default:
          VIXL_UNREACHABLE();
      }
      if (GenerateSplitInstruction(asmcb, cond, rd, rn, -imm, mask)) {
        return;
      }
    }

    UseScratchRegisterScope temps(this);
    // Allow using the destination as a scratch register if possible.
    if (!rd.Is(rn)) temps.Include(rd);
    if (rn.IsPC()) {
      // If we're reading the PC, we need to do it in the first instruction,
      // otherwise we'll read the wrong value. We rely on this to handle the
      // long-range PC-relative MemOperands which can result from user-managed
      // literals.

      // Only handle negative offsets. The correct way to handle positive
      // offsets isn't clear; does the user want the offset from the start of
      // the macro, or from the end (to allow a certain amount of space)?
      // Note the strict comparisons for the carry-using types: adc/sbc also
      // consume the carry flag, so a zero immediate can still move the
      // result in either direction.
      bool offset_is_negative_or_zero = (imm <= 0);
      switch (type) {
        case kAdd:
        case kAdds:
          offset_is_negative_or_zero = (imm <= 0);
          break;
        case kSub:
        case kSubs:
          offset_is_negative_or_zero = (imm >= 0);
          break;
        case kAdc:
        case kAdcs:
          offset_is_negative_or_zero = (imm < 0);
          break;
        case kSbc:
        case kSbcs:
          offset_is_negative_or_zero = (imm > 0);
          break;
        default:
          break;
      }
      if (offset_is_negative_or_zero) {
        {
          // Snapshot PC into a scratch register in its own scope so the
          // subsequent recursion reads a stable base register.
          rn = temps.Acquire();
          CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
          mov(cond, rn, pc);
        }
        // Recurse rather than falling through, to try to get the immediate into
        // a single instruction.
        CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
        (this->*instruction)(cond, size, rd, rn, operand);
        return;
      }
    } else {
      Register scratch = temps.Acquire();
      // TODO: The scope length was measured empirically. We should analyse the
      // worst-case size and add targetted tests.
      CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
      mov(cond, scratch, operand.GetImmediate());
      (this->*instruction)(cond, size, rd, rn, scratch);
      return;
    }
  }
  // No rewrite applied; let the base class report/handle the situation.
  Assembler::Delegate(type, instruction, cond, size, rd, rn, operand);
}
1208*f5c631daSSadaf Ebrahimi 
1209*f5c631daSSadaf Ebrahimi 
Delegate(InstructionType type,InstructionRL instruction,Register rn,Location * location)1210*f5c631daSSadaf Ebrahimi void MacroAssembler::Delegate(InstructionType type,
1211*f5c631daSSadaf Ebrahimi                               InstructionRL instruction,
1212*f5c631daSSadaf Ebrahimi                               Register rn,
1213*f5c631daSSadaf Ebrahimi                               Location* location) {
1214*f5c631daSSadaf Ebrahimi   VIXL_ASSERT((type == kCbz) || (type == kCbnz));
1215*f5c631daSSadaf Ebrahimi 
1216*f5c631daSSadaf Ebrahimi   CONTEXT_SCOPE;
1217*f5c631daSSadaf Ebrahimi   CodeBufferCheckScope scope(this, 2 * kMaxInstructionSizeInBytes);
1218*f5c631daSSadaf Ebrahimi   if (IsUsingA32()) {
1219*f5c631daSSadaf Ebrahimi     if (type == kCbz) {
1220*f5c631daSSadaf Ebrahimi       VIXL_ABORT_WITH_MSG("Cbz is only available for T32.\n");
1221*f5c631daSSadaf Ebrahimi     } else {
1222*f5c631daSSadaf Ebrahimi       VIXL_ABORT_WITH_MSG("Cbnz is only available for T32.\n");
1223*f5c631daSSadaf Ebrahimi     }
1224*f5c631daSSadaf Ebrahimi   } else if (rn.IsLow()) {
1225*f5c631daSSadaf Ebrahimi     switch (type) {
1226*f5c631daSSadaf Ebrahimi       case kCbnz: {
1227*f5c631daSSadaf Ebrahimi         Label done;
1228*f5c631daSSadaf Ebrahimi         cbz(rn, &done);
1229*f5c631daSSadaf Ebrahimi         b(location);
1230*f5c631daSSadaf Ebrahimi         Bind(&done);
1231*f5c631daSSadaf Ebrahimi         return;
1232*f5c631daSSadaf Ebrahimi       }
1233*f5c631daSSadaf Ebrahimi       case kCbz: {
1234*f5c631daSSadaf Ebrahimi         Label done;
1235*f5c631daSSadaf Ebrahimi         cbnz(rn, &done);
1236*f5c631daSSadaf Ebrahimi         b(location);
1237*f5c631daSSadaf Ebrahimi         Bind(&done);
1238*f5c631daSSadaf Ebrahimi         return;
1239*f5c631daSSadaf Ebrahimi       }
1240*f5c631daSSadaf Ebrahimi       default:
1241*f5c631daSSadaf Ebrahimi         break;
1242*f5c631daSSadaf Ebrahimi     }
1243*f5c631daSSadaf Ebrahimi   }
1244*f5c631daSSadaf Ebrahimi   Assembler::Delegate(type, instruction, rn, location);
1245*f5c631daSSadaf Ebrahimi }
1246*f5c631daSSadaf Ebrahimi 
1247*f5c631daSSadaf Ebrahimi 
// Returns true if every byte of `imm` is either 0x00 or 0xff, i.e. `imm`
// is encodable as a NEON "vmov.i64" byte-mask immediate.
template <typename T>
static inline bool IsI64BitPattern(T imm) {
  // Build the initial mask in T: the previous `0xff << ((sizeof(T)-1)*8)`
  // shifted a plain int, which is undefined behaviour for a 64-bit T
  // (shift count >= width of int).
  for (T mask = static_cast<T>(0xff) << ((sizeof(T) - 1) * 8); mask != 0;
       mask >>= 8) {
    // Each byte lane must be all-ones or all-zeros.
    if (((imm & mask) != mask) && ((imm & mask) != 0)) return false;
  }
  return true;
}
1255*f5c631daSSadaf Ebrahimi 
1256*f5c631daSSadaf Ebrahimi 
// Returns true if all bytes of `imm` are identical, i.e. `imm` is a
// replication of its least-significant byte and can be encoded as a
// "vmov.i8" immediate.
template <typename T>
static inline bool IsI8BitPattern(T imm) {
  const uint8_t first_byte = imm & 0xff;
  for (unsigned byte = 1; byte < sizeof(T); byte++) {
    imm >>= 8;
    if ((imm & 0xff) != first_byte) return false;
  }
  return true;
}
1266*f5c631daSSadaf Ebrahimi 
1267*f5c631daSSadaf Ebrahimi 
// Returns true if `imm32` has one arbitrary payload byte while the
// remaining bytes fall into one of these shapes (so the bitwise-inverted
// value is encodable, e.g. by vmvn):
//    11111111 11111111 11111111 abcdefgh
//    11111111 11111111 abcdefgh {00000000|11111111}
//    11111111 abcdefgh {00000000|11111111} (same filler in both low bytes)
//    abcdefgh 11111111 11111111 11111111
static inline bool CanBeInverted(uint32_t imm32) {
  //    11111111 11111111 11111111 abcdefgh
  if ((imm32 & 0xffffff00) == 0xffffff00) return true;

  // Every remaining shape requires the bottom byte to be all-zeros or
  // all-ones.
  const uint32_t byte0 = imm32 & 0xff;
  if ((byte0 != 0) && (byte0 != 0xff)) return false;

  //    11111111 11111111 abcdefgh {00000000|11111111}
  if ((imm32 & 0xffff0000) == 0xffff0000) return true;

  // For the last two shapes the second byte must replicate the bottom one.
  if (((imm32 >> 8) & 0xff) != byte0) return false;

  //    11111111 abcdefgh {00000000|11111111} {00000000|11111111}
  if ((imm32 & 0xff000000) == 0xff000000) return true;

  //    abcdefgh 11111111 11111111 11111111
  if ((byte0 == 0xff) && (((imm32 >> 16) & 0xff) == 0xff)) return true;

  return false;
}
1298*f5c631daSSadaf Ebrahimi 
1299*f5c631daSSadaf Ebrahimi 
1300*f5c631daSSadaf Ebrahimi template <typename RES, typename T>
replicate(T imm)1301*f5c631daSSadaf Ebrahimi static inline RES replicate(T imm) {
1302*f5c631daSSadaf Ebrahimi   VIXL_ASSERT((sizeof(RES) > sizeof(T)) &&
1303*f5c631daSSadaf Ebrahimi               (((sizeof(RES) / sizeof(T)) * sizeof(T)) == sizeof(RES)));
1304*f5c631daSSadaf Ebrahimi   RES res = imm;
1305*f5c631daSSadaf Ebrahimi   for (unsigned i = sizeof(RES) / sizeof(T) - 1; i > 0; i--) {
1306*f5c631daSSadaf Ebrahimi     res = (res << (sizeof(T) * 8)) | imm;
1307*f5c631daSSadaf Ebrahimi   }
1308*f5c631daSSadaf Ebrahimi   return res;
1309*f5c631daSSadaf Ebrahimi }
1310*f5c631daSSadaf Ebrahimi 
1311*f5c631daSSadaf Ebrahimi 
Delegate(InstructionType type,InstructionCondDtSSop instruction,Condition cond,DataType dt,SRegister rd,const SOperand & operand)1312*f5c631daSSadaf Ebrahimi void MacroAssembler::Delegate(InstructionType type,
1313*f5c631daSSadaf Ebrahimi                               InstructionCondDtSSop instruction,
1314*f5c631daSSadaf Ebrahimi                               Condition cond,
1315*f5c631daSSadaf Ebrahimi                               DataType dt,
1316*f5c631daSSadaf Ebrahimi                               SRegister rd,
1317*f5c631daSSadaf Ebrahimi                               const SOperand& operand) {
1318*f5c631daSSadaf Ebrahimi   CONTEXT_SCOPE;
1319*f5c631daSSadaf Ebrahimi   if (type == kVmov) {
1320*f5c631daSSadaf Ebrahimi     if (operand.IsImmediate() && dt.Is(F32)) {
1321*f5c631daSSadaf Ebrahimi       const NeonImmediate& neon_imm = operand.GetNeonImmediate();
1322*f5c631daSSadaf Ebrahimi       if (neon_imm.CanConvert<float>()) {
1323*f5c631daSSadaf Ebrahimi         // movw ip, imm16
1324*f5c631daSSadaf Ebrahimi         // movk ip, imm16
1325*f5c631daSSadaf Ebrahimi         // vmov s0, ip
1326*f5c631daSSadaf Ebrahimi         UseScratchRegisterScope temps(this);
1327*f5c631daSSadaf Ebrahimi         Register scratch = temps.Acquire();
1328*f5c631daSSadaf Ebrahimi         float f = neon_imm.GetImmediate<float>();
1329*f5c631daSSadaf Ebrahimi         // TODO: The scope length was measured empirically. We should analyse
1330*f5c631daSSadaf Ebrahimi         // the
1331*f5c631daSSadaf Ebrahimi         // worst-case size and add targetted tests.
1332*f5c631daSSadaf Ebrahimi         CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
1333*f5c631daSSadaf Ebrahimi         mov(cond, scratch, FloatToRawbits(f));
1334*f5c631daSSadaf Ebrahimi         vmov(cond, rd, scratch);
1335*f5c631daSSadaf Ebrahimi         return;
1336*f5c631daSSadaf Ebrahimi       }
1337*f5c631daSSadaf Ebrahimi     }
1338*f5c631daSSadaf Ebrahimi   }
1339*f5c631daSSadaf Ebrahimi   Assembler::Delegate(type, instruction, cond, dt, rd, operand);
1340*f5c631daSSadaf Ebrahimi }
1341*f5c631daSSadaf Ebrahimi 
1342*f5c631daSSadaf Ebrahimi 
// Delegate for D-register instructions whose operand could not be encoded
// directly; only handles vmov of an immediate. Tries cheaper encodings
// (vmov.i8, vmov.i64, vmvn.i32) before materialising the value through
// core scratch registers, and falls back to Assembler::Delegate otherwise.
void MacroAssembler::Delegate(InstructionType type,
                              InstructionCondDtDDop instruction,
                              Condition cond,
                              DataType dt,
                              DRegister rd,
                              const DOperand& operand) {
  CONTEXT_SCOPE;
  if (type == kVmov) {
    if (operand.IsImmediate()) {
      const NeonImmediate& neon_imm = operand.GetNeonImmediate();
      switch (dt.GetValue()) {
        case I32:
          if (neon_imm.CanConvert<uint32_t>()) {
            uint32_t imm = neon_imm.GetImmediate<uint32_t>();
            // vmov.i32 d0, 0xabababab will translate into vmov.i8 d0, 0xab
            if (IsI8BitPattern(imm)) {
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmov(cond, I8, rd, imm & 0xff);
              return;
            }
            // vmov.i32 d0, 0xff0000ff will translate into
            // vmov.i64 d0, 0xff0000ffff0000ff
            if (IsI64BitPattern(imm)) {
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmov(cond, I64, rd, replicate<uint64_t>(imm));
              return;
            }
            // vmov.i32 d0, 0xffab0000 will translate into
            // vmvn.i32 d0, 0x0054ffff
            // (Only unconditional here; the vmvn form used takes no
            // condition argument.)
            if (cond.Is(al) && CanBeInverted(imm)) {
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmvn(I32, rd, ~imm);
              return;
            }
          }
          break;
        case I16:
          if (neon_imm.CanConvert<uint16_t>()) {
            uint16_t imm = neon_imm.GetImmediate<uint16_t>();
            // vmov.i16 d0, 0xabab will translate into vmov.i8 d0, 0xab
            if (IsI8BitPattern(imm)) {
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmov(cond, I8, rd, imm & 0xff);
              return;
            }
          }
          break;
        case I64:
          if (neon_imm.CanConvert<uint64_t>()) {
            uint64_t imm = neon_imm.GetImmediate<uint64_t>();
            // vmov.i64 d0, -1 will translate into vmov.i8 d0, 0xff
            if (IsI8BitPattern(imm)) {
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmov(cond, I8, rd, imm & 0xff);
              return;
            }
            // Otherwise build the 64-bit value in two halves.
            // mov ip, lo(imm64)
            // vdup d0, ip
            // vdup is prefered to 'vmov d0[0]' as d0[1] does not need to be
            // preserved
            {
              UseScratchRegisterScope temps(this);
              Register scratch = temps.Acquire();
              {
                // TODO: The scope length was measured empirically. We should
                // analyse the
                // worst-case size and add targetted tests.
                CodeBufferCheckScope scope(this,
                                           2 * kMaxInstructionSizeInBytes);
                mov(cond, scratch, static_cast<uint32_t>(imm & 0xffffffff));
              }
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vdup(cond, Untyped32, rd, scratch);
            }
            // mov ip, hi(imm64)
            // vmov d0[1], ip
            {
              UseScratchRegisterScope temps(this);
              Register scratch = temps.Acquire();
              {
                // TODO: The scope length was measured empirically. We should
                // analyse the
                // worst-case size and add targetted tests.
                CodeBufferCheckScope scope(this,
                                           2 * kMaxInstructionSizeInBytes);
                mov(cond, scratch, static_cast<uint32_t>(imm >> 32));
              }
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmov(cond, Untyped32, DRegisterLane(rd, 1), scratch);
            }
            return;
          }
          break;
        default:
          break;
      }
      VIXL_ASSERT(!dt.Is(I8));  // I8 cases should have been handled already.
      // Generic integer fallback: build the 32-bit value in a core register
      // and duplicate it into every lane of the requested width.
      if ((dt.Is(I16) || dt.Is(I32)) && neon_imm.CanConvert<uint32_t>()) {
        // mov ip, imm32
        // vdup.16 d0, ip
        UseScratchRegisterScope temps(this);
        Register scratch = temps.Acquire();
        {
          CodeBufferCheckScope scope(this, 2 * kMaxInstructionSizeInBytes);
          mov(cond, scratch, neon_imm.GetImmediate<uint32_t>());
        }
        DataTypeValue vdup_dt = Untyped32;
        switch (dt.GetValue()) {
          case I16:
            vdup_dt = Untyped16;
            break;
          case I32:
            vdup_dt = Untyped32;
            break;
          default:
            VIXL_UNREACHABLE();
        }
        CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
        vdup(cond, vdup_dt, rd, scratch);
        return;
      }
      // Floating-point immediates are re-expressed as their raw bit
      // patterns and re-dispatched through the integer paths above.
      if (dt.Is(F32) && neon_imm.CanConvert<float>()) {
        float f = neon_imm.GetImmediate<float>();
        // Punt to vmov.i32
        // TODO: The scope length was guessed based on the double case below. We
        // should analyse the worst-case size and add targetted tests.
        CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
        vmov(cond, I32, rd, FloatToRawbits(f));
        return;
      }
      if (dt.Is(F64) && neon_imm.CanConvert<double>()) {
        // Punt to vmov.i64
        double d = neon_imm.GetImmediate<double>();
        // TODO: The scope length was measured empirically. We should analyse
        // the
        // worst-case size and add targetted tests.
        CodeBufferCheckScope scope(this, 6 * kMaxInstructionSizeInBytes);
        vmov(cond, I64, rd, DoubleToRawbits(d));
        return;
      }
    }
  }
  Assembler::Delegate(type, instruction, cond, dt, rd, operand);
}
1487*f5c631daSSadaf Ebrahimi 
1488*f5c631daSSadaf Ebrahimi 
// Delegate reached when a vmov on a Q register could not be encoded directly
// by the Assembler. Immediate operands are synthesised using equivalent,
// encodable sequences (a narrower vmov encoding, vmvn, or a scratch-register
// mov followed by vdup/lane moves); anything still unhandled is passed back
// to the Assembler's delegate, which reports the problem.
void MacroAssembler::Delegate(InstructionType type,
                              InstructionCondDtQQop instruction,
                              Condition cond,
                              DataType dt,
                              QRegister rd,
                              const QOperand& operand) {
  CONTEXT_SCOPE;
  if (type == kVmov) {
    if (operand.IsImmediate()) {
      const NeonImmediate& neon_imm = operand.GetNeonImmediate();
      switch (dt.GetValue()) {
        case I32:
          if (neon_imm.CanConvert<uint32_t>()) {
            uint32_t imm = neon_imm.GetImmediate<uint32_t>();
            // vmov.i32 d0, 0xabababab will translate into vmov.i8 d0, 0xab
            if (IsI8BitPattern(imm)) {
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmov(cond, I8, rd, imm & 0xff);
              return;
            }
            // vmov.i32 d0, 0xff0000ff will translate into
            // vmov.i64 d0, 0xff0000ffff0000ff
            if (IsI64BitPattern(imm)) {
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmov(cond, I64, rd, replicate<uint64_t>(imm));
              return;
            }
            // vmov.i32 d0, 0xffab0000 will translate into
            // vmvn.i32 d0, 0x0054ffff
            if (CanBeInverted(imm)) {
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmvn(cond, I32, rd, ~imm);
              return;
            }
          }
          break;
        case I16:
          if (neon_imm.CanConvert<uint16_t>()) {
            uint16_t imm = neon_imm.GetImmediate<uint16_t>();
            // vmov.i16 d0, 0xabab will translate into vmov.i8 d0, 0xab
            if (IsI8BitPattern(imm)) {
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmov(cond, I8, rd, imm & 0xff);
              return;
            }
          }
          break;
        case I64:
          if (neon_imm.CanConvert<uint64_t>()) {
            uint64_t imm = neon_imm.GetImmediate<uint64_t>();
            // vmov.i64 d0, -1 will translate into vmov.i8 d0, 0xff
            if (IsI8BitPattern(imm)) {
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmov(cond, I8, rd, imm & 0xff);
              return;
            }
            // General case, built in two steps. First, broadcast the low
            // 32 bits of the immediate:
            // mov ip, lo(imm64)
            // vdup q0, ip
            // vdup is preferred to 'vmov d0[0]' as d0[1-3] don't need to be
            // preserved
            {
              UseScratchRegisterScope temps(this);
              Register scratch = temps.Acquire();
              {
                // The scope allows two instructions because a full 32-bit
                // immediate mov can expand into a pair of instructions.
                CodeBufferCheckScope scope(this,
                                           2 * kMaxInstructionSizeInBytes);
                mov(cond, scratch, static_cast<uint32_t>(imm & 0xffffffff));
              }
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vdup(cond, Untyped32, rd, scratch);
            }
            // Then patch in the high 32 bits and replicate the completed
            // low D register into the high half:
            // mov ip, hi(imm64)
            // vmov.i32 d0[1], ip
            // vmov d1, d0
            {
              UseScratchRegisterScope temps(this);
              Register scratch = temps.Acquire();
              {
                CodeBufferCheckScope scope(this,
                                           2 * kMaxInstructionSizeInBytes);
                mov(cond, scratch, static_cast<uint32_t>(imm >> 32));
              }
              {
                CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
                vmov(cond,
                     Untyped32,
                     DRegisterLane(rd.GetLowDRegister(), 1),
                     scratch);
              }
              CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
              vmov(cond, F64, rd.GetHighDRegister(), rd.GetLowDRegister());
            }
            return;
          }
          break;
        default:
          break;
      }
      VIXL_ASSERT(!dt.Is(I8));  // I8 cases should have been handled already.
      // Fallback for I16/I32 immediates with no compact encoding:
      // mov ip, imm32
      // vdup.16 d0, ip
      if ((dt.Is(I16) || dt.Is(I32)) && neon_imm.CanConvert<uint32_t>()) {
        UseScratchRegisterScope temps(this);
        Register scratch = temps.Acquire();
        {
          CodeBufferCheckScope scope(this, 2 * kMaxInstructionSizeInBytes);
          mov(cond, scratch, neon_imm.GetImmediate<uint32_t>());
        }
        DataTypeValue vdup_dt = Untyped32;
        switch (dt.GetValue()) {
          case I16:
            vdup_dt = Untyped16;
            break;
          case I32:
            vdup_dt = Untyped32;
            break;
          default:
            VIXL_UNREACHABLE();
        }
        CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
        vdup(cond, vdup_dt, rd, scratch);
        return;
      }
      if (dt.Is(F32) && neon_imm.CanConvert<float>()) {
        // Punt to vmov.i32 with the raw bit pattern of the float.
        float f = neon_imm.GetImmediate<float>();
        CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
        vmov(cond, I32, rd, FloatToRawbits(f));
        return;
      }
      if (dt.Is(F64) && neon_imm.CanConvert<double>()) {
        // Use vmov to create the double in the low D register, then duplicate
        // it into the high D register.
        double d = neon_imm.GetImmediate<double>();
        CodeBufferCheckScope scope(this, 7 * kMaxInstructionSizeInBytes);
        vmov(cond, F64, rd.GetLowDRegister(), d);
        vmov(cond, F64, rd.GetHighDRegister(), rd.GetLowDRegister());
        return;
      }
    }
  }
  // Non-immediate operands (or unsupported types) go back to the Assembler.
  Assembler::Delegate(type, instruction, cond, dt, rd, operand);
}
1632*f5c631daSSadaf Ebrahimi 
1633*f5c631daSSadaf Ebrahimi 
Delegate(InstructionType type,InstructionCondRL instruction,Condition cond,Register rt,Location * location)1634*f5c631daSSadaf Ebrahimi void MacroAssembler::Delegate(InstructionType type,
1635*f5c631daSSadaf Ebrahimi                               InstructionCondRL instruction,
1636*f5c631daSSadaf Ebrahimi                               Condition cond,
1637*f5c631daSSadaf Ebrahimi                               Register rt,
1638*f5c631daSSadaf Ebrahimi                               Location* location) {
1639*f5c631daSSadaf Ebrahimi   VIXL_ASSERT((type == kLdrb) || (type == kLdrh) || (type == kLdrsb) ||
1640*f5c631daSSadaf Ebrahimi               (type == kLdrsh));
1641*f5c631daSSadaf Ebrahimi 
1642*f5c631daSSadaf Ebrahimi   CONTEXT_SCOPE;
1643*f5c631daSSadaf Ebrahimi 
1644*f5c631daSSadaf Ebrahimi   if (location->IsBound()) {
1645*f5c631daSSadaf Ebrahimi     CodeBufferCheckScope scope(this, 5 * kMaxInstructionSizeInBytes);
1646*f5c631daSSadaf Ebrahimi     UseScratchRegisterScope temps(this);
1647*f5c631daSSadaf Ebrahimi     temps.Include(rt);
1648*f5c631daSSadaf Ebrahimi     Register scratch = temps.Acquire();
1649*f5c631daSSadaf Ebrahimi     uint32_t mask = GetOffsetMask(type, Offset);
1650*f5c631daSSadaf Ebrahimi     switch (type) {
1651*f5c631daSSadaf Ebrahimi       case kLdrb:
1652*f5c631daSSadaf Ebrahimi         ldrb(rt, MemOperandComputationHelper(cond, scratch, location, mask));
1653*f5c631daSSadaf Ebrahimi         return;
1654*f5c631daSSadaf Ebrahimi       case kLdrh:
1655*f5c631daSSadaf Ebrahimi         ldrh(rt, MemOperandComputationHelper(cond, scratch, location, mask));
1656*f5c631daSSadaf Ebrahimi         return;
1657*f5c631daSSadaf Ebrahimi       case kLdrsb:
1658*f5c631daSSadaf Ebrahimi         ldrsb(rt, MemOperandComputationHelper(cond, scratch, location, mask));
1659*f5c631daSSadaf Ebrahimi         return;
1660*f5c631daSSadaf Ebrahimi       case kLdrsh:
1661*f5c631daSSadaf Ebrahimi         ldrsh(rt, MemOperandComputationHelper(cond, scratch, location, mask));
1662*f5c631daSSadaf Ebrahimi         return;
1663*f5c631daSSadaf Ebrahimi       default:
1664*f5c631daSSadaf Ebrahimi         VIXL_UNREACHABLE();
1665*f5c631daSSadaf Ebrahimi     }
1666*f5c631daSSadaf Ebrahimi     return;
1667*f5c631daSSadaf Ebrahimi   }
1668*f5c631daSSadaf Ebrahimi 
1669*f5c631daSSadaf Ebrahimi   Assembler::Delegate(type, instruction, cond, rt, location);
1670*f5c631daSSadaf Ebrahimi }
1671*f5c631daSSadaf Ebrahimi 
1672*f5c631daSSadaf Ebrahimi 
Delegate(InstructionType type,InstructionCondRRL instruction,Condition cond,Register rt,Register rt2,Location * location)1673*f5c631daSSadaf Ebrahimi void MacroAssembler::Delegate(InstructionType type,
1674*f5c631daSSadaf Ebrahimi                               InstructionCondRRL instruction,
1675*f5c631daSSadaf Ebrahimi                               Condition cond,
1676*f5c631daSSadaf Ebrahimi                               Register rt,
1677*f5c631daSSadaf Ebrahimi                               Register rt2,
1678*f5c631daSSadaf Ebrahimi                               Location* location) {
1679*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(type == kLdrd);
1680*f5c631daSSadaf Ebrahimi 
1681*f5c631daSSadaf Ebrahimi   CONTEXT_SCOPE;
1682*f5c631daSSadaf Ebrahimi 
1683*f5c631daSSadaf Ebrahimi   if (location->IsBound()) {
1684*f5c631daSSadaf Ebrahimi     CodeBufferCheckScope scope(this, 6 * kMaxInstructionSizeInBytes);
1685*f5c631daSSadaf Ebrahimi     UseScratchRegisterScope temps(this);
1686*f5c631daSSadaf Ebrahimi     temps.Include(rt, rt2);
1687*f5c631daSSadaf Ebrahimi     Register scratch = temps.Acquire();
1688*f5c631daSSadaf Ebrahimi     uint32_t mask = GetOffsetMask(type, Offset);
1689*f5c631daSSadaf Ebrahimi     ldrd(rt, rt2, MemOperandComputationHelper(cond, scratch, location, mask));
1690*f5c631daSSadaf Ebrahimi     return;
1691*f5c631daSSadaf Ebrahimi   }
1692*f5c631daSSadaf Ebrahimi 
1693*f5c631daSSadaf Ebrahimi   Assembler::Delegate(type, instruction, cond, rt, rt2, location);
1694*f5c631daSSadaf Ebrahimi }
1695*f5c631daSSadaf Ebrahimi 
1696*f5c631daSSadaf Ebrahimi 
// Delegate reached for single-register loads/stores (ldr, ldrb, ldrh, ldrsb,
// ldrsh, str, strb, strh) whose MemOperand the Assembler could not encode.
// Too-large immediate offsets are split into an add of the base plus an
// encodable residual offset; register offsets are folded into the base the
// same way. Unpredictable register combinations and PC-based operands are
// not rewritten: they either abort or fall through to the Assembler's
// delegate, which reports the problem.
void MacroAssembler::Delegate(InstructionType type,
                              InstructionCondSizeRMop instruction,
                              Condition cond,
                              EncodingSize size,
                              Register rd,
                              const MemOperand& operand) {
  CONTEXT_SCOPE;
  VIXL_ASSERT(size.IsBest());
  VIXL_ASSERT((type == kLdr) || (type == kLdrb) || (type == kLdrh) ||
              (type == kLdrsb) || (type == kLdrsh) || (type == kStr) ||
              (type == kStrb) || (type == kStrh));
  if (operand.IsImmediate()) {
    const Register& rn = operand.GetBaseRegister();
    AddrMode addrmode = operand.GetAddrMode();
    int32_t offset = operand.GetOffsetImmediate();
    uint32_t extra_offset_mask = GetOffsetMask(type, addrmode);
    // Try to maximize the offset used by the MemOperand (load_store_offset).
    // Add the part which can't be used by the MemOperand (add_offset).
    uint32_t load_store_offset = offset & extra_offset_mask;
    uint32_t add_offset = offset & ~extra_offset_mask;
    // If the whole offset fits in a single modified-immediate add/sub,
    // prefer putting it all in the add and using a zero load/store offset.
    if ((add_offset != 0) &&
        (IsModifiedImmediate(offset) || IsModifiedImmediate(-offset))) {
      load_store_offset = 0;
      add_offset = offset;
    }
    switch (addrmode) {
      case PreIndex:
        // Avoid the unpredictable case 'str r0, [r0, imm]!'
        if (!rn.Is(rd)) {
          // Pre-Indexed case:
          // ldr r0, [r1, 12345]! will translate into
          //   add r1, r1, 12345
          //   ldr r0, [r1]
          {
            CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
            add(cond, rn, rn, add_offset);
          }
          {
            CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
            (this->*instruction)(cond,
                                 size,
                                 rd,
                                 MemOperand(rn, load_store_offset, PreIndex));
          }
          return;
        }
        break;
      case Offset: {
        UseScratchRegisterScope temps(this);
        // Allow using the destination as a scratch register if possible.
        if ((type != kStr) && (type != kStrb) && (type != kStrh) &&
            !rd.Is(rn)) {
          temps.Include(rd);
        }
        Register scratch = temps.Acquire();
        // Offset case:
        // ldr r0, [r1, 12345] will translate into
        //   add r0, r1, 12345
        //   ldr r0, [r0]
        {
          CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
          add(cond, scratch, rn, add_offset);
        }
        {
          CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
          (this->*instruction)(cond,
                               size,
                               rd,
                               MemOperand(scratch, load_store_offset));
        }
        return;
      }
      case PostIndex:
        // Avoid the unpredictable case 'ldr r0, [r0], imm'
        if (!rn.Is(rd)) {
          // Post-indexed case:
          // ldr r0, [r1], imm32 will translate into
          //   ldr r0, [r1]
          //   movw ip, imm32 & 0xffff
          //   movt ip, imm32 >> 16
          //   add r1, r1, ip
          {
            CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
            (this->*instruction)(cond,
                                 size,
                                 rd,
                                 MemOperand(rn, load_store_offset, PostIndex));
          }
          {
            CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
            add(cond, rn, rn, add_offset);
          }
          return;
        }
        break;
    }
  } else if (operand.IsPlainRegister()) {
    const Register& rn = operand.GetBaseRegister();
    AddrMode addrmode = operand.GetAddrMode();
    const Register& rm = operand.GetOffsetRegister();
    // PC-relative register operands cannot be rewritten safely; abort with a
    // diagnostic rather than emit something unpredictable.
    if (rm.IsPC()) {
      VIXL_ABORT_WITH_MSG(
          "The MacroAssembler does not convert loads and stores with a PC "
          "offset register.\n");
    }
    if (rn.IsPC()) {
      if (addrmode == Offset) {
        if (IsUsingT32()) {
          VIXL_ABORT_WITH_MSG(
              "The MacroAssembler does not convert loads and stores with a PC "
              "base register for T32.\n");
        }
      } else {
        VIXL_ABORT_WITH_MSG(
            "The MacroAssembler does not convert loads and stores with a PC "
            "base register in pre-index or post-index mode.\n");
      }
    }
    switch (addrmode) {
      case PreIndex:
        // Avoid the unpredictable case 'str r0, [r0, imm]!'
        if (!rn.Is(rd)) {
          // Pre-Indexed case:
          // ldr r0, [r1, r2]! will translate into
          //   add r1, r1, r2
          //   ldr r0, [r1]
          {
            CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
            if (operand.GetSign().IsPlus()) {
              add(cond, rn, rn, rm);
            } else {
              sub(cond, rn, rn, rm);
            }
          }
          {
            CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
            (this->*instruction)(cond, size, rd, MemOperand(rn, Offset));
          }
          return;
        }
        break;
      case Offset: {
        UseScratchRegisterScope temps(this);
        // Allow using the destination as a scratch register if this is not a
        // store.
        // Avoid using PC as a temporary as this has side-effects.
        if ((type != kStr) && (type != kStrb) && (type != kStrh) &&
            !rd.IsPC()) {
          temps.Include(rd);
        }
        Register scratch = temps.Acquire();
        // Offset case:
        // ldr r0, [r1, r2] will translate into
        //   add r0, r1, r2
        //   ldr r0, [r0]
        {
          CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
          if (operand.GetSign().IsPlus()) {
            add(cond, scratch, rn, rm);
          } else {
            sub(cond, scratch, rn, rm);
          }
        }
        {
          CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
          (this->*instruction)(cond, size, rd, MemOperand(scratch, Offset));
        }
        return;
      }
      case PostIndex:
        // Avoid the unpredictable case 'ldr r0, [r0], imm'
        if (!rn.Is(rd)) {
          // Post-indexed case:
          // ldr r0, [r1], r2 will translate into
          //   ldr r0, [r1]
          //   add r1, r1, r2
          {
            CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
            (this->*instruction)(cond, size, rd, MemOperand(rn, Offset));
          }
          {
            CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
            if (operand.GetSign().IsPlus()) {
              add(cond, rn, rn, rm);
            } else {
              sub(cond, rn, rn, rm);
            }
          }
          return;
        }
        break;
    }
  }
  // Unpredictable or otherwise unhandled cases go back to the Assembler.
  Assembler::Delegate(type, instruction, cond, size, rd, operand);
}
1892*f5c631daSSadaf Ebrahimi 
1893*f5c631daSSadaf Ebrahimi 
Delegate(InstructionType type,InstructionCondRRMop instruction,Condition cond,Register rt,Register rt2,const MemOperand & operand)1894*f5c631daSSadaf Ebrahimi void MacroAssembler::Delegate(InstructionType type,
1895*f5c631daSSadaf Ebrahimi                               InstructionCondRRMop instruction,
1896*f5c631daSSadaf Ebrahimi                               Condition cond,
1897*f5c631daSSadaf Ebrahimi                               Register rt,
1898*f5c631daSSadaf Ebrahimi                               Register rt2,
1899*f5c631daSSadaf Ebrahimi                               const MemOperand& operand) {
1900*f5c631daSSadaf Ebrahimi   if ((type == kLdaexd) || (type == kLdrexd) || (type == kStlex) ||
1901*f5c631daSSadaf Ebrahimi       (type == kStlexb) || (type == kStlexh) || (type == kStrex) ||
1902*f5c631daSSadaf Ebrahimi       (type == kStrexb) || (type == kStrexh)) {
1903*f5c631daSSadaf Ebrahimi     UnimplementedDelegate(type);
1904*f5c631daSSadaf Ebrahimi     return;
1905*f5c631daSSadaf Ebrahimi   }
1906*f5c631daSSadaf Ebrahimi 
1907*f5c631daSSadaf Ebrahimi   VIXL_ASSERT((type == kLdrd) || (type == kStrd));
1908*f5c631daSSadaf Ebrahimi 
1909*f5c631daSSadaf Ebrahimi   CONTEXT_SCOPE;
1910*f5c631daSSadaf Ebrahimi 
1911*f5c631daSSadaf Ebrahimi   // TODO: Should we allow these cases?
1912*f5c631daSSadaf Ebrahimi   if (IsUsingA32()) {
1913*f5c631daSSadaf Ebrahimi     // The first register needs to be even.
1914*f5c631daSSadaf Ebrahimi     if ((rt.GetCode() & 1) != 0) {
1915*f5c631daSSadaf Ebrahimi       UnimplementedDelegate(type);
1916*f5c631daSSadaf Ebrahimi       return;
1917*f5c631daSSadaf Ebrahimi     }
1918*f5c631daSSadaf Ebrahimi     // Registers need to be adjacent.
1919*f5c631daSSadaf Ebrahimi     if (((rt.GetCode() + 1) % kNumberOfRegisters) != rt2.GetCode()) {
1920*f5c631daSSadaf Ebrahimi       UnimplementedDelegate(type);
1921*f5c631daSSadaf Ebrahimi       return;
1922*f5c631daSSadaf Ebrahimi     }
1923*f5c631daSSadaf Ebrahimi     // LDRD lr, pc [...] is not allowed.
1924*f5c631daSSadaf Ebrahimi     if (rt.Is(lr)) {
1925*f5c631daSSadaf Ebrahimi       UnimplementedDelegate(type);
1926*f5c631daSSadaf Ebrahimi       return;
1927*f5c631daSSadaf Ebrahimi     }
1928*f5c631daSSadaf Ebrahimi   }
1929*f5c631daSSadaf Ebrahimi 
1930*f5c631daSSadaf Ebrahimi   if (operand.IsImmediate()) {
1931*f5c631daSSadaf Ebrahimi     const Register& rn = operand.GetBaseRegister();
1932*f5c631daSSadaf Ebrahimi     AddrMode addrmode = operand.GetAddrMode();
1933*f5c631daSSadaf Ebrahimi     int32_t offset = operand.GetOffsetImmediate();
1934*f5c631daSSadaf Ebrahimi     uint32_t extra_offset_mask = GetOffsetMask(type, addrmode);
1935*f5c631daSSadaf Ebrahimi     // Try to maximize the offset used by the MemOperand (load_store_offset).
1936*f5c631daSSadaf Ebrahimi     // Add the part which can't be used by the MemOperand (add_offset).
1937*f5c631daSSadaf Ebrahimi     uint32_t load_store_offset = offset & extra_offset_mask;
1938*f5c631daSSadaf Ebrahimi     uint32_t add_offset = offset & ~extra_offset_mask;
1939*f5c631daSSadaf Ebrahimi     if ((add_offset != 0) &&
1940*f5c631daSSadaf Ebrahimi         (IsModifiedImmediate(offset) || IsModifiedImmediate(-offset))) {
1941*f5c631daSSadaf Ebrahimi       load_store_offset = 0;
1942*f5c631daSSadaf Ebrahimi       add_offset = offset;
1943*f5c631daSSadaf Ebrahimi     }
1944*f5c631daSSadaf Ebrahimi     switch (addrmode) {
1945*f5c631daSSadaf Ebrahimi       case PreIndex: {
1946*f5c631daSSadaf Ebrahimi         // Allow using the destinations as a scratch registers if possible.
1947*f5c631daSSadaf Ebrahimi         UseScratchRegisterScope temps(this);
1948*f5c631daSSadaf Ebrahimi         if (type == kLdrd) {
1949*f5c631daSSadaf Ebrahimi           if (!rt.Is(rn)) temps.Include(rt);
1950*f5c631daSSadaf Ebrahimi           if (!rt2.Is(rn)) temps.Include(rt2);
1951*f5c631daSSadaf Ebrahimi         }
1952*f5c631daSSadaf Ebrahimi 
1953*f5c631daSSadaf Ebrahimi         // Pre-Indexed case:
1954*f5c631daSSadaf Ebrahimi         // ldrd r0, r1, [r2, 12345]! will translate into
1955*f5c631daSSadaf Ebrahimi         //   add r2, 12345
1956*f5c631daSSadaf Ebrahimi         //   ldrd r0, r1, [r2]
1957*f5c631daSSadaf Ebrahimi         {
1958*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
1959*f5c631daSSadaf Ebrahimi           add(cond, rn, rn, add_offset);
1960*f5c631daSSadaf Ebrahimi         }
1961*f5c631daSSadaf Ebrahimi         {
1962*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
1963*f5c631daSSadaf Ebrahimi           (this->*instruction)(cond,
1964*f5c631daSSadaf Ebrahimi                                rt,
1965*f5c631daSSadaf Ebrahimi                                rt2,
1966*f5c631daSSadaf Ebrahimi                                MemOperand(rn, load_store_offset, PreIndex));
1967*f5c631daSSadaf Ebrahimi         }
1968*f5c631daSSadaf Ebrahimi         return;
1969*f5c631daSSadaf Ebrahimi       }
1970*f5c631daSSadaf Ebrahimi       case Offset: {
1971*f5c631daSSadaf Ebrahimi         UseScratchRegisterScope temps(this);
1972*f5c631daSSadaf Ebrahimi         // Allow using the destinations as a scratch registers if possible.
1973*f5c631daSSadaf Ebrahimi         if (type == kLdrd) {
1974*f5c631daSSadaf Ebrahimi           if (!rt.Is(rn)) temps.Include(rt);
1975*f5c631daSSadaf Ebrahimi           if (!rt2.Is(rn)) temps.Include(rt2);
1976*f5c631daSSadaf Ebrahimi         }
1977*f5c631daSSadaf Ebrahimi         Register scratch = temps.Acquire();
1978*f5c631daSSadaf Ebrahimi         // Offset case:
1979*f5c631daSSadaf Ebrahimi         // ldrd r0, r1, [r2, 12345] will translate into
1980*f5c631daSSadaf Ebrahimi         //   add r0, r2, 12345
1981*f5c631daSSadaf Ebrahimi         //   ldrd r0, r1, [r0]
1982*f5c631daSSadaf Ebrahimi         {
1983*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
1984*f5c631daSSadaf Ebrahimi           add(cond, scratch, rn, add_offset);
1985*f5c631daSSadaf Ebrahimi         }
1986*f5c631daSSadaf Ebrahimi         {
1987*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
1988*f5c631daSSadaf Ebrahimi           (this->*instruction)(cond,
1989*f5c631daSSadaf Ebrahimi                                rt,
1990*f5c631daSSadaf Ebrahimi                                rt2,
1991*f5c631daSSadaf Ebrahimi                                MemOperand(scratch, load_store_offset));
1992*f5c631daSSadaf Ebrahimi         }
1993*f5c631daSSadaf Ebrahimi         return;
1994*f5c631daSSadaf Ebrahimi       }
1995*f5c631daSSadaf Ebrahimi       case PostIndex:
1996*f5c631daSSadaf Ebrahimi         // Avoid the unpredictable case 'ldrd r0, r1, [r0], imm'
1997*f5c631daSSadaf Ebrahimi         if (!rn.Is(rt) && !rn.Is(rt2)) {
1998*f5c631daSSadaf Ebrahimi           // Post-indexed case:
1999*f5c631daSSadaf Ebrahimi           // ldrd r0, r1, [r2], imm32 will translate into
2000*f5c631daSSadaf Ebrahimi           //   ldrd r0, r1, [r2]
2001*f5c631daSSadaf Ebrahimi           //   movw ip. imm32 & 0xffffffff
2002*f5c631daSSadaf Ebrahimi           //   movt ip, imm32 >> 16
2003*f5c631daSSadaf Ebrahimi           //   add r2, ip
2004*f5c631daSSadaf Ebrahimi           {
2005*f5c631daSSadaf Ebrahimi             CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
2006*f5c631daSSadaf Ebrahimi             (this->*instruction)(cond,
2007*f5c631daSSadaf Ebrahimi                                  rt,
2008*f5c631daSSadaf Ebrahimi                                  rt2,
2009*f5c631daSSadaf Ebrahimi                                  MemOperand(rn, load_store_offset, PostIndex));
2010*f5c631daSSadaf Ebrahimi           }
2011*f5c631daSSadaf Ebrahimi           {
2012*f5c631daSSadaf Ebrahimi             CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
2013*f5c631daSSadaf Ebrahimi             add(cond, rn, rn, add_offset);
2014*f5c631daSSadaf Ebrahimi           }
2015*f5c631daSSadaf Ebrahimi           return;
2016*f5c631daSSadaf Ebrahimi         }
2017*f5c631daSSadaf Ebrahimi         break;
2018*f5c631daSSadaf Ebrahimi     }
2019*f5c631daSSadaf Ebrahimi   }
2020*f5c631daSSadaf Ebrahimi   if (operand.IsPlainRegister()) {
2021*f5c631daSSadaf Ebrahimi     const Register& rn = operand.GetBaseRegister();
2022*f5c631daSSadaf Ebrahimi     const Register& rm = operand.GetOffsetRegister();
2023*f5c631daSSadaf Ebrahimi     AddrMode addrmode = operand.GetAddrMode();
2024*f5c631daSSadaf Ebrahimi     switch (addrmode) {
2025*f5c631daSSadaf Ebrahimi       case PreIndex:
2026*f5c631daSSadaf Ebrahimi         // ldrd r0, r1, [r2, r3]! will translate into
2027*f5c631daSSadaf Ebrahimi         //   add r2, r3
2028*f5c631daSSadaf Ebrahimi         //   ldrd r0, r1, [r2]
2029*f5c631daSSadaf Ebrahimi         {
2030*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
2031*f5c631daSSadaf Ebrahimi           if (operand.GetSign().IsPlus()) {
2032*f5c631daSSadaf Ebrahimi             add(cond, rn, rn, rm);
2033*f5c631daSSadaf Ebrahimi           } else {
2034*f5c631daSSadaf Ebrahimi             sub(cond, rn, rn, rm);
2035*f5c631daSSadaf Ebrahimi           }
2036*f5c631daSSadaf Ebrahimi         }
2037*f5c631daSSadaf Ebrahimi         {
2038*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
2039*f5c631daSSadaf Ebrahimi           (this->*instruction)(cond, rt, rt2, MemOperand(rn, Offset));
2040*f5c631daSSadaf Ebrahimi         }
2041*f5c631daSSadaf Ebrahimi         return;
2042*f5c631daSSadaf Ebrahimi       case PostIndex:
2043*f5c631daSSadaf Ebrahimi         // ldrd r0, r1, [r2], r3 will translate into
2044*f5c631daSSadaf Ebrahimi         //   ldrd r0, r1, [r2]
2045*f5c631daSSadaf Ebrahimi         //   add r2, r3
2046*f5c631daSSadaf Ebrahimi         {
2047*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
2048*f5c631daSSadaf Ebrahimi           (this->*instruction)(cond, rt, rt2, MemOperand(rn, Offset));
2049*f5c631daSSadaf Ebrahimi         }
2050*f5c631daSSadaf Ebrahimi         {
2051*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
2052*f5c631daSSadaf Ebrahimi           if (operand.GetSign().IsPlus()) {
2053*f5c631daSSadaf Ebrahimi             add(cond, rn, rn, rm);
2054*f5c631daSSadaf Ebrahimi           } else {
2055*f5c631daSSadaf Ebrahimi             sub(cond, rn, rn, rm);
2056*f5c631daSSadaf Ebrahimi           }
2057*f5c631daSSadaf Ebrahimi         }
2058*f5c631daSSadaf Ebrahimi         return;
2059*f5c631daSSadaf Ebrahimi       case Offset: {
2060*f5c631daSSadaf Ebrahimi         UseScratchRegisterScope temps(this);
2061*f5c631daSSadaf Ebrahimi         // Allow using the destinations as a scratch registers if possible.
2062*f5c631daSSadaf Ebrahimi         if (type == kLdrd) {
2063*f5c631daSSadaf Ebrahimi           if (!rt.Is(rn)) temps.Include(rt);
2064*f5c631daSSadaf Ebrahimi           if (!rt2.Is(rn)) temps.Include(rt2);
2065*f5c631daSSadaf Ebrahimi         }
2066*f5c631daSSadaf Ebrahimi         Register scratch = temps.Acquire();
2067*f5c631daSSadaf Ebrahimi         // Offset case:
2068*f5c631daSSadaf Ebrahimi         // ldrd r0, r1, [r2, r3] will translate into
2069*f5c631daSSadaf Ebrahimi         //   add r0, r2, r3
2070*f5c631daSSadaf Ebrahimi         //   ldrd r0, r1, [r0]
2071*f5c631daSSadaf Ebrahimi         {
2072*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
2073*f5c631daSSadaf Ebrahimi           if (operand.GetSign().IsPlus()) {
2074*f5c631daSSadaf Ebrahimi             add(cond, scratch, rn, rm);
2075*f5c631daSSadaf Ebrahimi           } else {
2076*f5c631daSSadaf Ebrahimi             sub(cond, scratch, rn, rm);
2077*f5c631daSSadaf Ebrahimi           }
2078*f5c631daSSadaf Ebrahimi         }
2079*f5c631daSSadaf Ebrahimi         {
2080*f5c631daSSadaf Ebrahimi           CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
2081*f5c631daSSadaf Ebrahimi           (this->*instruction)(cond, rt, rt2, MemOperand(scratch, Offset));
2082*f5c631daSSadaf Ebrahimi         }
2083*f5c631daSSadaf Ebrahimi         return;
2084*f5c631daSSadaf Ebrahimi       }
2085*f5c631daSSadaf Ebrahimi     }
2086*f5c631daSSadaf Ebrahimi   }
2087*f5c631daSSadaf Ebrahimi   Assembler::Delegate(type, instruction, cond, rt, rt2, operand);
2088*f5c631daSSadaf Ebrahimi }
2089*f5c631daSSadaf Ebrahimi 
2090*f5c631daSSadaf Ebrahimi 
// Delegate for vldr/vstr of an SRegister whose MemOperand the assembler
// rejected (typically an out-of-range immediate offset). Rewrites the access
// into an address computation plus a plain [base] access, preserving the
// pre/post-index writeback semantics, then re-dispatches anything it cannot
// handle to the Assembler delegate (which reports the error).
void MacroAssembler::Delegate(InstructionType type,
                              InstructionCondDtSMop instruction,
                              Condition cond,
                              DataType dt,
                              SRegister rd,
                              const MemOperand& operand) {
  CONTEXT_SCOPE;
  if (operand.IsImmediate()) {
    const Register& rn = operand.GetBaseRegister();
    AddrMode addrmode = operand.GetAddrMode();
    int32_t offset = operand.GetOffsetImmediate();
    // The sign stored in the operand must agree with the signed offset value.
    VIXL_ASSERT(((offset > 0) && operand.GetSign().IsPlus()) ||
                ((offset < 0) && operand.GetSign().IsMinus()) || (offset == 0));
    // A PC base cannot be adjusted with an `add`, so no rewrite is possible.
    if (rn.IsPC()) {
      VIXL_ABORT_WITH_MSG(
          "The MacroAssembler does not convert vldr or vstr with a PC base "
          "register.\n");
    }
    switch (addrmode) {
      case PreIndex:
        // Pre-Indexed case:
        // vldr.32 s0, [r1, 12345]! will translate into
        //   add r1, 12345
        //   vldr.32 s0, [r1]
        // Updating the base first gives the same writeback as pre-indexing.
        if (offset != 0) {
          // `add` with a large immediate may expand to up to 3 instructions.
          CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
          add(cond, rn, rn, offset);
        }
        {
          CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
          (this->*instruction)(cond, dt, rd, MemOperand(rn, Offset));
        }
        return;
      case Offset: {
        UseScratchRegisterScope temps(this);
        Register scratch = temps.Acquire();
        // Offset case:
        // vldr.32 s0, [r1, 12345] will translate into
        //   add ip, r1, 12345
        //   vldr.32 s0, [ip]
        // No writeback, so the address is built in a scratch register.
        {
          // A zero offset would have been accepted by the assembler directly.
          VIXL_ASSERT(offset != 0);
          CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
          add(cond, scratch, rn, offset);
        }
        {
          CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
          (this->*instruction)(cond, dt, rd, MemOperand(scratch, Offset));
        }
        return;
      }
      case PostIndex:
        // Post-indexed case:
        // vldr.32 s0, [r1], imm32 will translate into
        //   vldr.32 s0, [r1]
        //   movw ip, imm32 & 0xffff
        //   movt ip, imm32 >> 16
        //   add r1, ip
        // The access uses the original base; the base is updated afterwards.
        {
          CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
          (this->*instruction)(cond, dt, rd, MemOperand(rn, Offset));
        }
        if (offset != 0) {
          CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
          add(cond, rn, rn, offset);
        }
        return;
    }
  }
  // Not an immediate-offset operand: let the base class diagnose it.
  Assembler::Delegate(type, instruction, cond, dt, rd, operand);
}
2162*f5c631daSSadaf Ebrahimi 
2163*f5c631daSSadaf Ebrahimi 
// Delegate for vldr/vstr of a DRegister whose MemOperand the assembler
// rejected (typically an out-of-range immediate offset). Mirrors the
// SRegister delegate above: the access is rewritten into an address
// computation plus a plain [base] access, keeping pre/post-index writeback
// semantics, and anything else falls through to the Assembler delegate.
void MacroAssembler::Delegate(InstructionType type,
                              InstructionCondDtDMop instruction,
                              Condition cond,
                              DataType dt,
                              DRegister rd,
                              const MemOperand& operand) {
  CONTEXT_SCOPE;
  if (operand.IsImmediate()) {
    const Register& rn = operand.GetBaseRegister();
    AddrMode addrmode = operand.GetAddrMode();
    int32_t offset = operand.GetOffsetImmediate();
    // The sign stored in the operand must agree with the signed offset value.
    VIXL_ASSERT(((offset > 0) && operand.GetSign().IsPlus()) ||
                ((offset < 0) && operand.GetSign().IsMinus()) || (offset == 0));
    // A PC base cannot be adjusted with an `add`, so no rewrite is possible.
    if (rn.IsPC()) {
      VIXL_ABORT_WITH_MSG(
          "The MacroAssembler does not convert vldr or vstr with a PC base "
          "register.\n");
    }
    switch (addrmode) {
      case PreIndex:
        // Pre-Indexed case:
        // vldr.64 d0, [r1, 12345]! will translate into
        //   add r1, 12345
        //   vldr.64 d0, [r1]
        // Updating the base first gives the same writeback as pre-indexing.
        if (offset != 0) {
          // `add` with a large immediate may expand to up to 3 instructions.
          CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
          add(cond, rn, rn, offset);
        }
        {
          CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
          (this->*instruction)(cond, dt, rd, MemOperand(rn, Offset));
        }
        return;
      case Offset: {
        UseScratchRegisterScope temps(this);
        Register scratch = temps.Acquire();
        // Offset case:
        // vldr.64 d0, [r1, 12345] will translate into
        //   add ip, r1, 12345
        //   vldr.64 d0, [ip]
        // No writeback, so the address is built in a scratch register.
        {
          // A zero offset would have been accepted by the assembler directly.
          VIXL_ASSERT(offset != 0);
          CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
          add(cond, scratch, rn, offset);
        }
        {
          CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
          (this->*instruction)(cond, dt, rd, MemOperand(scratch, Offset));
        }
        return;
      }
      case PostIndex:
        // Post-indexed case:
        // vldr.64 d0, [r1], imm32 will translate into
        //   vldr.64 d0, [r1]
        //   movw ip, imm32 & 0xffff
        //   movt ip, imm32 >> 16
        //   add r1, ip
        // The access uses the original base; the base is updated afterwards.
        {
          CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
          (this->*instruction)(cond, dt, rd, MemOperand(rn, Offset));
        }
        if (offset != 0) {
          CodeBufferCheckScope scope(this, 3 * kMaxInstructionSizeInBytes);
          add(cond, rn, rn, offset);
        }
        return;
    }
  }
  // Not an immediate-offset operand: let the base class diagnose it.
  Assembler::Delegate(type, instruction, cond, dt, rd, operand);
}
2235*f5c631daSSadaf Ebrahimi 
2236*f5c631daSSadaf Ebrahimi 
Delegate(InstructionType type,InstructionCondMsrOp instruction,Condition cond,MaskedSpecialRegister spec_reg,const Operand & operand)2237*f5c631daSSadaf Ebrahimi void MacroAssembler::Delegate(InstructionType type,
2238*f5c631daSSadaf Ebrahimi                               InstructionCondMsrOp instruction,
2239*f5c631daSSadaf Ebrahimi                               Condition cond,
2240*f5c631daSSadaf Ebrahimi                               MaskedSpecialRegister spec_reg,
2241*f5c631daSSadaf Ebrahimi                               const Operand& operand) {
2242*f5c631daSSadaf Ebrahimi   USE(type);
2243*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(type == kMsr);
2244*f5c631daSSadaf Ebrahimi   if (operand.IsImmediate()) {
2245*f5c631daSSadaf Ebrahimi     UseScratchRegisterScope temps(this);
2246*f5c631daSSadaf Ebrahimi     Register scratch = temps.Acquire();
2247*f5c631daSSadaf Ebrahimi     {
2248*f5c631daSSadaf Ebrahimi       CodeBufferCheckScope scope(this, 2 * kMaxInstructionSizeInBytes);
2249*f5c631daSSadaf Ebrahimi       mov(cond, scratch, operand);
2250*f5c631daSSadaf Ebrahimi     }
2251*f5c631daSSadaf Ebrahimi     CodeBufferCheckScope scope(this, kMaxInstructionSizeInBytes);
2252*f5c631daSSadaf Ebrahimi     msr(cond, spec_reg, scratch);
2253*f5c631daSSadaf Ebrahimi     return;
2254*f5c631daSSadaf Ebrahimi   }
2255*f5c631daSSadaf Ebrahimi   Assembler::Delegate(type, instruction, cond, spec_reg, operand);
2256*f5c631daSSadaf Ebrahimi }
2257*f5c631daSSadaf Ebrahimi 
2258*f5c631daSSadaf Ebrahimi 
Delegate(InstructionType type,InstructionCondDtDL instruction,Condition cond,DataType dt,DRegister rd,Location * location)2259*f5c631daSSadaf Ebrahimi void MacroAssembler::Delegate(InstructionType type,
2260*f5c631daSSadaf Ebrahimi                               InstructionCondDtDL instruction,
2261*f5c631daSSadaf Ebrahimi                               Condition cond,
2262*f5c631daSSadaf Ebrahimi                               DataType dt,
2263*f5c631daSSadaf Ebrahimi                               DRegister rd,
2264*f5c631daSSadaf Ebrahimi                               Location* location) {
2265*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(type == kVldr);
2266*f5c631daSSadaf Ebrahimi 
2267*f5c631daSSadaf Ebrahimi   CONTEXT_SCOPE;
2268*f5c631daSSadaf Ebrahimi 
2269*f5c631daSSadaf Ebrahimi   if (location->IsBound()) {
2270*f5c631daSSadaf Ebrahimi     CodeBufferCheckScope scope(this, 5 * kMaxInstructionSizeInBytes);
2271*f5c631daSSadaf Ebrahimi     UseScratchRegisterScope temps(this);
2272*f5c631daSSadaf Ebrahimi     Register scratch = temps.Acquire();
2273*f5c631daSSadaf Ebrahimi     uint32_t mask = GetOffsetMask(type, Offset);
2274*f5c631daSSadaf Ebrahimi     vldr(dt, rd, MemOperandComputationHelper(cond, scratch, location, mask));
2275*f5c631daSSadaf Ebrahimi     return;
2276*f5c631daSSadaf Ebrahimi   }
2277*f5c631daSSadaf Ebrahimi 
2278*f5c631daSSadaf Ebrahimi   Assembler::Delegate(type, instruction, cond, dt, rd, location);
2279*f5c631daSSadaf Ebrahimi }
2280*f5c631daSSadaf Ebrahimi 
2281*f5c631daSSadaf Ebrahimi 
Delegate(InstructionType type,InstructionCondDtSL instruction,Condition cond,DataType dt,SRegister rd,Location * location)2282*f5c631daSSadaf Ebrahimi void MacroAssembler::Delegate(InstructionType type,
2283*f5c631daSSadaf Ebrahimi                               InstructionCondDtSL instruction,
2284*f5c631daSSadaf Ebrahimi                               Condition cond,
2285*f5c631daSSadaf Ebrahimi                               DataType dt,
2286*f5c631daSSadaf Ebrahimi                               SRegister rd,
2287*f5c631daSSadaf Ebrahimi                               Location* location) {
2288*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(type == kVldr);
2289*f5c631daSSadaf Ebrahimi 
2290*f5c631daSSadaf Ebrahimi   CONTEXT_SCOPE;
2291*f5c631daSSadaf Ebrahimi 
2292*f5c631daSSadaf Ebrahimi   if (location->IsBound()) {
2293*f5c631daSSadaf Ebrahimi     CodeBufferCheckScope scope(this, 5 * kMaxInstructionSizeInBytes);
2294*f5c631daSSadaf Ebrahimi     UseScratchRegisterScope temps(this);
2295*f5c631daSSadaf Ebrahimi     Register scratch = temps.Acquire();
2296*f5c631daSSadaf Ebrahimi     uint32_t mask = GetOffsetMask(type, Offset);
2297*f5c631daSSadaf Ebrahimi     vldr(dt, rd, MemOperandComputationHelper(cond, scratch, location, mask));
2298*f5c631daSSadaf Ebrahimi     return;
2299*f5c631daSSadaf Ebrahimi   }
2300*f5c631daSSadaf Ebrahimi 
2301*f5c631daSSadaf Ebrahimi   Assembler::Delegate(type, instruction, cond, dt, rd, location);
2302*f5c631daSSadaf Ebrahimi }
2303*f5c631daSSadaf Ebrahimi 
2304*f5c631daSSadaf Ebrahimi 
2305*f5c631daSSadaf Ebrahimi #undef CONTEXT_SCOPE
2306*f5c631daSSadaf Ebrahimi #undef TOSTRING
2307*f5c631daSSadaf Ebrahimi #undef STRINGIFY
2308*f5c631daSSadaf Ebrahimi 
2309*f5c631daSSadaf Ebrahimi // Start of generated code.
2310*f5c631daSSadaf Ebrahimi // End of generated code.
2311*f5c631daSSadaf Ebrahimi }  // namespace aarch32
2312*f5c631daSSadaf Ebrahimi }  // namespace vixl
2313