// Copyright 2015, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of ARM Limited nor the names of its contributors may be
// used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <cctype>

#include "macro-assembler-aarch64.h"

namespace vixl {
namespace aarch64 {


void Pool::Release() {
  if (--monitor_ == 0) {
    // Ensure the pool has not been blocked for too long.
    VIXL_ASSERT(masm_->GetCursorOffset() < checkpoint_);
  }
}


void Pool::SetNextCheckpoint(ptrdiff_t checkpoint) {
  masm_->checkpoint_ = std::min(masm_->checkpoint_, checkpoint);
  checkpoint_ = checkpoint;
}


LiteralPool::LiteralPool(MacroAssembler* masm)
    : Pool(masm),
      size_(0),
      first_use_(-1),
      recommended_checkpoint_(kNoCheckpointRequired) {}


LiteralPool::~LiteralPool() VIXL_NEGATIVE_TESTING_ALLOW_EXCEPTION {
  VIXL_ASSERT(IsEmpty());
  VIXL_ASSERT(!IsBlocked());
  for (std::vector<RawLiteral*>::iterator it = deleted_on_destruction_.begin();
       it != deleted_on_destruction_.end();
       it++) {
    delete *it;
  }
}


void LiteralPool::Reset() {
  std::vector<RawLiteral*>::iterator it, end;
  for (it = entries_.begin(), end = entries_.end(); it != end; ++it) {
    RawLiteral* literal = *it;
    if (literal->deletion_policy_ == RawLiteral::kDeletedOnPlacementByPool) {
      delete literal;
    }
  }
  entries_.clear();
  size_ = 0;
  first_use_ = -1;
  Pool::Reset();
  recommended_checkpoint_ = kNoCheckpointRequired;
}


void LiteralPool::CheckEmitFor(size_t amount, EmitOption option) {
  if (IsEmpty() || IsBlocked()) return;

  ptrdiff_t distance = masm_->GetCursorOffset() + amount - first_use_;
  if (distance >= kRecommendedLiteralPoolRange) {
    Emit(option);
  }
}


void LiteralPool::CheckEmitForBranch(size_t range) {
  if (IsEmpty() || IsBlocked()) return;
  if (GetMaxSize() >= range) Emit();
}

// We use a subclass to access the protected `ExactAssemblyScope` constructor
// giving us control over the pools. This allows us to use this scope within
// code emitting pools without creating a circular dependency.
// We keep the constructor private to restrict usage of this helper class.
class ExactAssemblyScopeWithoutPoolsCheck : public ExactAssemblyScope {
 private:
  ExactAssemblyScopeWithoutPoolsCheck(MacroAssembler* masm, size_t size)
      : ExactAssemblyScope(masm,
                           size,
                           ExactAssemblyScope::kExactSize,
                           ExactAssemblyScope::kIgnorePools) {}

  friend void LiteralPool::Emit(LiteralPool::EmitOption);
  friend void VeneerPool::Emit(VeneerPool::EmitOption, size_t);
};


void LiteralPool::Emit(EmitOption option) {
  // There is an issue if we are asked to emit a blocked or empty pool.
  VIXL_ASSERT(!IsBlocked());
  VIXL_ASSERT(!IsEmpty());

  size_t pool_size = GetSize();
  size_t emit_size = pool_size;
  if (option == kBranchRequired) emit_size += kInstructionSize;
  Label end_of_pool;

  VIXL_ASSERT(emit_size % kInstructionSize == 0);
  {
    CodeBufferCheckScope guard(masm_,
                               emit_size,
                               CodeBufferCheckScope::kCheck,
                               CodeBufferCheckScope::kExactSize);
#ifdef VIXL_DEBUG
    // Also explicitly disallow usage of the `MacroAssembler` here.
    masm_->SetAllowMacroInstructions(false);
#endif
    if (option == kBranchRequired) {
      ExactAssemblyScopeWithoutPoolsCheck eas_guard(masm_, kInstructionSize);
      masm_->b(&end_of_pool);
    }

    {
      // Marker indicating the size of the literal pool in 32-bit words.
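      // For example (illustrative), a 16-byte pool is marked by an `ldr` into
      // xzr whose immediate field encodes 16 / 4 = 4 words; a load into the
      // zero register discards its result.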
      VIXL_ASSERT((pool_size % kWRegSizeInBytes) == 0);
      ExactAssemblyScopeWithoutPoolsCheck eas_guard(masm_, kInstructionSize);
      masm_->ldr(xzr, static_cast<int>(pool_size / kWRegSizeInBytes));
    }

    // Now populate the literal pool.
    std::vector<RawLiteral*>::iterator it, end;
    for (it = entries_.begin(), end = entries_.end(); it != end; ++it) {
      VIXL_ASSERT((*it)->IsUsed());
      masm_->place(*it);
    }

    if (option == kBranchRequired) masm_->bind(&end_of_pool);
#ifdef VIXL_DEBUG
    masm_->SetAllowMacroInstructions(true);
#endif
  }

  Reset();
}


void LiteralPool::AddEntry(RawLiteral* literal) {
  // A literal must be registered immediately before its first use. We cannot
  // check here that this is its first use, but we can check that no code has
  // been emitted since its last use.
  VIXL_ASSERT(masm_->GetCursorOffset() == literal->GetLastUse());

  UpdateFirstUse(masm_->GetCursorOffset());
  VIXL_ASSERT(masm_->GetCursorOffset() >= first_use_);
  entries_.push_back(literal);
  size_ += literal->GetSize();
}


void LiteralPool::UpdateFirstUse(ptrdiff_t use_position) {
  first_use_ = std::min(first_use_, use_position);
  if (first_use_ == -1) {
    first_use_ = use_position;
    SetNextRecommendedCheckpoint(GetNextRecommendedCheckpoint());
    SetNextCheckpoint(first_use_ + Instruction::kLoadLiteralRange);
  } else {
    VIXL_ASSERT(use_position > first_use_);
  }
}


void VeneerPool::Reset() {
  Pool::Reset();
  unresolved_branches_.Reset();
}


void VeneerPool::Release() {
  if (--monitor_ == 0) {
    VIXL_ASSERT(IsEmpty() ||
                masm_->GetCursorOffset() <
                    unresolved_branches_.GetFirstLimit());
  }
}


void VeneerPool::RegisterUnresolvedBranch(ptrdiff_t branch_pos,
                                          Label* label,
                                          ImmBranchType branch_type) {
  VIXL_ASSERT(!label->IsBound());
  BranchInfo branch_info = BranchInfo(branch_pos, label, branch_type);
  unresolved_branches_.insert(branch_info);
  UpdateNextCheckPoint();
  // TODO: In debug mode, register the label with the assembler to make sure
  // it is bound with the MacroAssembler's Bind and not the Assembler's bind.
}


void VeneerPool::DeleteUnresolvedBranchInfoForLabel(Label* label) {
  if (IsEmpty()) {
    VIXL_ASSERT(checkpoint_ == kNoCheckpointRequired);
    return;
  }

  if (label->IsLinked()) {
    Label::LabelLinksIterator links_it(label);
    for (; !links_it.Done(); links_it.Advance()) {
      ptrdiff_t link_offset = *links_it.Current();
      Instruction* link = masm_->GetInstructionAt(link_offset);

      // ADR instructions are not handled.
      if (BranchTypeUsesVeneers(link->GetBranchType())) {
        BranchInfo branch_info(link_offset, label, link->GetBranchType());
        unresolved_branches_.erase(branch_info);
      }
    }
  }

  UpdateNextCheckPoint();
}


bool VeneerPool::ShouldEmitVeneer(int64_t first_unreacheable_pc,
                                  size_t amount) {
  ptrdiff_t offset =
      kPoolNonVeneerCodeSize + amount + GetMaxSize() + GetOtherPoolsMaxSize();
  return (masm_->GetCursorOffset() + offset) > first_unreacheable_pc;
}


void VeneerPool::CheckEmitFor(size_t amount, EmitOption option) {
  if (IsEmpty()) return;

  VIXL_ASSERT(masm_->GetCursorOffset() + kPoolNonVeneerCodeSize <
              unresolved_branches_.GetFirstLimit());

  if (IsBlocked()) return;

  if (ShouldEmitVeneers(amount)) {
    Emit(option, amount);
  } else {
    UpdateNextCheckPoint();
  }
}


void VeneerPool::Emit(EmitOption option, size_t amount) {
  // There is an issue if we are asked to emit a blocked or empty pool.
  VIXL_ASSERT(!IsBlocked());
  VIXL_ASSERT(!IsEmpty());

  Label end;
  if (option == kBranchRequired) {
    ExactAssemblyScopeWithoutPoolsCheck guard(masm_, kInstructionSize);
    masm_->b(&end);
  }

  // We want to avoid generating veneer pools too often, so generate veneers
  // for branches that don't immediately require a veneer but will soon go out
  // of range.
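  // For example (illustrative): a tbz, whose range is only about +/-32 KB,
  // that is already close to that limit gets a veneer now rather than forcing
  // another pool emission a few instructions later.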
  static const size_t kVeneerEmissionMargin = 1 * KBytes;

  for (BranchInfoSetIterator it(&unresolved_branches_); !it.Done();) {
    BranchInfo* branch_info = it.Current();
    if (ShouldEmitVeneer(branch_info->first_unreacheable_pc_,
                         amount + kVeneerEmissionMargin)) {
      CodeBufferCheckScope scope(masm_,
                                 kVeneerCodeSize,
                                 CodeBufferCheckScope::kCheck,
                                 CodeBufferCheckScope::kExactSize);
      ptrdiff_t branch_pos = branch_info->pc_offset_;
      Instruction* branch = masm_->GetInstructionAt(branch_pos);
      Label* label = branch_info->label_;

      // Patch the branch to point to the current position, and emit a branch
      // to the label.
      Instruction* veneer = masm_->GetCursorAddress<Instruction*>();
      branch->SetImmPCOffsetTarget(veneer);
      {
        ExactAssemblyScopeWithoutPoolsCheck guard(masm_, kInstructionSize);
        masm_->b(label);
      }

      // Update the label. The patched branch no longer points to it.
      label->DeleteLink(branch_pos);

      it.DeleteCurrentAndAdvance();
    } else {
      it.AdvanceToNextType();
    }
  }

  UpdateNextCheckPoint();

  masm_->bind(&end);
}


MacroAssembler::MacroAssembler(PositionIndependentCodeOption pic)
    : Assembler(pic),
#ifdef VIXL_DEBUG
      allow_macro_instructions_(true),
#endif
      generate_simulator_code_(VIXL_AARCH64_GENERATE_SIMULATOR_CODE),
      sp_(sp),
      tmp_list_(ip0, ip1),
      v_tmp_list_(d31),
      p_tmp_list_(CPURegList::Empty(CPURegister::kPRegister)),
      current_scratch_scope_(NULL),
      literal_pool_(this),
      veneer_pool_(this),
      recommended_checkpoint_(Pool::kNoCheckpointRequired),
      fp_nan_propagation_(NoFPMacroNaNPropagationSelected) {
  checkpoint_ = GetNextCheckPoint();
#ifndef VIXL_DEBUG
  USE(allow_macro_instructions_);
#endif
}


MacroAssembler::MacroAssembler(size_t capacity,
                               PositionIndependentCodeOption pic)
    : Assembler(capacity, pic),
#ifdef VIXL_DEBUG
      allow_macro_instructions_(true),
#endif
      generate_simulator_code_(VIXL_AARCH64_GENERATE_SIMULATOR_CODE),
      sp_(sp),
      tmp_list_(ip0, ip1),
      v_tmp_list_(d31),
      p_tmp_list_(CPURegList::Empty(CPURegister::kPRegister)),
      current_scratch_scope_(NULL),
      literal_pool_(this),
      veneer_pool_(this),
      recommended_checkpoint_(Pool::kNoCheckpointRequired),
      fp_nan_propagation_(NoFPMacroNaNPropagationSelected) {
  checkpoint_ = GetNextCheckPoint();
}


MacroAssembler::MacroAssembler(byte* buffer,
                               size_t capacity,
                               PositionIndependentCodeOption pic)
    : Assembler(buffer, capacity, pic),
#ifdef VIXL_DEBUG
      allow_macro_instructions_(true),
#endif
      generate_simulator_code_(VIXL_AARCH64_GENERATE_SIMULATOR_CODE),
      sp_(sp),
      tmp_list_(ip0, ip1),
      v_tmp_list_(d31),
      p_tmp_list_(CPURegList::Empty(CPURegister::kPRegister)),
      current_scratch_scope_(NULL),
      literal_pool_(this),
      veneer_pool_(this),
      recommended_checkpoint_(Pool::kNoCheckpointRequired),
      fp_nan_propagation_(NoFPMacroNaNPropagationSelected) {
  checkpoint_ = GetNextCheckPoint();
}


MacroAssembler::~MacroAssembler() {}


void MacroAssembler::Reset() {
  Assembler::Reset();

  VIXL_ASSERT(!literal_pool_.IsBlocked());
  literal_pool_.Reset();
  veneer_pool_.Reset();

  checkpoint_ = GetNextCheckPoint();
}


void MacroAssembler::FinalizeCode(FinalizeOption option) {
  if (!literal_pool_.IsEmpty()) {
    // The user may decide to emit more code after FinalizeCode; emit a branch
    // over the pool if that is the case.
    literal_pool_.Emit(option == kUnreachable ? Pool::kNoBranchRequired
                                              : Pool::kBranchRequired);
  }
  VIXL_ASSERT(veneer_pool_.IsEmpty());

  Assembler::FinalizeCode();
}


void MacroAssembler::CheckEmitFor(size_t amount) {
  CheckEmitPoolsFor(amount);
  GetBuffer()->EnsureSpaceFor(amount);
}


void MacroAssembler::CheckEmitPoolsFor(size_t amount) {
  literal_pool_.CheckEmitFor(amount);
  veneer_pool_.CheckEmitFor(amount);
  checkpoint_ = GetNextCheckPoint();
}


int MacroAssembler::MoveImmediateHelper(MacroAssembler* masm,
                                        const Register& rd,
                                        uint64_t imm) {
  bool emit_code = (masm != NULL);
  VIXL_ASSERT(IsUint32(imm) || IsInt32(imm) || rd.Is64Bits());
  // The worst case for size is mov 64-bit immediate to sp:
  // * up to 4 instructions to materialise the constant
  // * 1 instruction to move to sp
  MacroEmissionCheckScope guard(masm);

  // Immediates on AArch64 can be produced using an initial value, and zero to
  // three move-keep operations.
  //
  // Initial values can be generated with:
  // 1. 64-bit move zero (movz).
  // 2. 32-bit move inverted (movn).
  // 3. 64-bit move inverted.
  // 4. 32-bit orr immediate.
  // 5. 64-bit orr immediate.
  // Move-keep may then be used to modify each of the 16-bit half words.
  //
  // The code below supports all five initial value generators, and
  // applying move-keep operations to move-zero and move-inverted initial
  // values.
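  //
  // For example (illustrative values):
  //   0xffffffffffff1234 -> movn xd, #0xedcb                  (1 instruction)
  //   0x0000123400005678 -> movz xd, #0x5678
  //                         movk xd, #0x1234, lsl #32         (2 instructions)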

  // Try to move the immediate in one instruction, and if that fails, switch
  // to using multiple instructions.
  if (OneInstrMoveImmediateHelper(masm, rd, imm)) {
    return 1;
  } else {
    int instruction_count = 0;
    unsigned reg_size = rd.GetSizeInBits();

    // Generic immediate case. Imm will be represented by
    // [imm3, imm2, imm1, imm0], where each imm is 16 bits.
    // A move-zero or move-inverted is generated for the first non-zero or
    // non-0xffff immX, and a move-keep for subsequent non-zero immX.

    uint64_t ignored_halfword = 0;
    bool invert_move = false;
    // If the number of 0xffff halfwords is greater than the number of 0x0000
    // halfwords, it's more efficient to use move-inverted.
    if (CountClearHalfWords(~imm, reg_size) >
        CountClearHalfWords(imm, reg_size)) {
      ignored_halfword = 0xffff;
      invert_move = true;
    }

    // Mov instructions can't move values into the stack pointer, so set up a
    // temporary register, if needed.
    UseScratchRegisterScope temps;
    Register temp;
    if (emit_code) {
      temps.Open(masm);
      temp = rd.IsSP() ? temps.AcquireSameSizeAs(rd) : rd;
    }

    // Iterate through the halfwords. Use movn/movz for the first non-ignored
    // halfword, and movk for subsequent halfwords.
    VIXL_ASSERT((reg_size % 16) == 0);
    bool first_mov_done = false;
    for (unsigned i = 0; i < (reg_size / 16); i++) {
      uint64_t imm16 = (imm >> (16 * i)) & 0xffff;
      if (imm16 != ignored_halfword) {
        if (!first_mov_done) {
          if (invert_move) {
            if (emit_code) masm->movn(temp, ~imm16 & 0xffff, 16 * i);
            instruction_count++;
          } else {
            if (emit_code) masm->movz(temp, imm16, 16 * i);
            instruction_count++;
          }
          first_mov_done = true;
        } else {
          // Construct a wider constant.
          if (emit_code) masm->movk(temp, imm16, 16 * i);
          instruction_count++;
        }
      }
    }

    VIXL_ASSERT(first_mov_done);

    // Move the temporary if the original destination register was the stack
    // pointer.
    if (rd.IsSP()) {
      if (emit_code) masm->mov(rd, temp);
      instruction_count++;
    }
    return instruction_count;
  }
}


void MacroAssembler::B(Label* label, BranchType type, Register reg, int bit) {
  VIXL_ASSERT((reg.Is(NoReg) || (type >= kBranchTypeFirstUsingReg)) &&
              ((bit == -1) || (type >= kBranchTypeFirstUsingBit)));
  if (kBranchTypeFirstCondition <= type && type <= kBranchTypeLastCondition) {
    B(static_cast<Condition>(type), label);
  } else {
    switch (type) {
      case always:
        B(label);
        break;
      case never:
        break;
      case reg_zero:
        Cbz(reg, label);
        break;
      case reg_not_zero:
        Cbnz(reg, label);
        break;
      case reg_bit_clear:
        Tbz(reg, bit, label);
        break;
      case reg_bit_set:
        Tbnz(reg, bit, label);
        break;
      default:
        VIXL_UNREACHABLE();
    }
  }
}


void MacroAssembler::B(Label* label) {
  // We don't need to check the size of the literal pool, because the size of
  // the literal pool is already bounded by the literal range, which is
  // smaller than the range of this branch.
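  // (Illustrative figures: an unconditional branch reaches about +/-128 MB,
  // while a load-literal only reaches about +/-1 MB.)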
  VIXL_ASSERT(Instruction::GetImmBranchForwardRange(UncondBranchType) >
              Instruction::kLoadLiteralRange);
  SingleEmissionCheckScope guard(this);
  b(label);
}


void MacroAssembler::B(Label* label, Condition cond) {
  // We don't need to check the size of the literal pool, because the size of
  // the literal pool is already bounded by the literal range, which is
  // smaller than the range of this branch.
  VIXL_ASSERT(Instruction::GetImmBranchForwardRange(CondBranchType) >
              Instruction::kLoadLiteralRange);
  VIXL_ASSERT(allow_macro_instructions_);
  VIXL_ASSERT((cond != al) && (cond != nv));
  EmissionCheckScope guard(this, 2 * kInstructionSize);

  if (label->IsBound() && LabelIsOutOfRange(label, CondBranchType)) {
    Label done;
    b(&done, InvertCondition(cond));
    b(label);
    bind(&done);
  } else {
    if (!label->IsBound()) {
      veneer_pool_.RegisterUnresolvedBranch(GetCursorOffset(),
                                            label,
                                            CondBranchType);
    }
    b(label, cond);
  }
}


void MacroAssembler::Cbnz(const Register& rt, Label* label) {
  // We don't need to check the size of the literal pool, because the size of
  // the literal pool is already bounded by the literal range, which is
  // smaller than the range of this branch.
  VIXL_ASSERT(Instruction::GetImmBranchForwardRange(CompareBranchType) >
              Instruction::kLoadLiteralRange);
  VIXL_ASSERT(allow_macro_instructions_);
  VIXL_ASSERT(!rt.IsZero());
  EmissionCheckScope guard(this, 2 * kInstructionSize);

  if (label->IsBound() && LabelIsOutOfRange(label, CondBranchType)) {
    Label done;
    cbz(rt, &done);
    b(label);
    bind(&done);
  } else {
    if (!label->IsBound()) {
      veneer_pool_.RegisterUnresolvedBranch(GetCursorOffset(),
                                            label,
                                            CompareBranchType);
    }
    cbnz(rt, label);
  }
}


void MacroAssembler::Cbz(const Register& rt, Label* label) {
  // We don't need to check the size of the literal pool, because the size of
  // the literal pool is already bounded by the literal range, which is
  // smaller than the range of this branch.
  VIXL_ASSERT(Instruction::GetImmBranchForwardRange(CompareBranchType) >
              Instruction::kLoadLiteralRange);
  VIXL_ASSERT(allow_macro_instructions_);
  VIXL_ASSERT(!rt.IsZero());
  EmissionCheckScope guard(this, 2 * kInstructionSize);

  if (label->IsBound() && LabelIsOutOfRange(label, CondBranchType)) {
    Label done;
    cbnz(rt, &done);
    b(label);
    bind(&done);
  } else {
    if (!label->IsBound()) {
      veneer_pool_.RegisterUnresolvedBranch(GetCursorOffset(),
                                            label,
                                            CompareBranchType);
    }
    cbz(rt, label);
  }
}


void MacroAssembler::Tbnz(const Register& rt, unsigned bit_pos, Label* label) {
  // This is to avoid a situation where emitting a veneer for a TBZ/TBNZ
  // branch can become impossible because we emit the literal pool first.
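  // (A tbz/tbnz branch only reaches about +/-32 KB, far less than the literal
  // range, so the pool must be flushed before it grows past that limit.)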
  literal_pool_.CheckEmitForBranch(
      Instruction::GetImmBranchForwardRange(TestBranchType));
  VIXL_ASSERT(allow_macro_instructions_);
  VIXL_ASSERT(!rt.IsZero());
  EmissionCheckScope guard(this, 2 * kInstructionSize);

  if (label->IsBound() && LabelIsOutOfRange(label, TestBranchType)) {
    Label done;
    tbz(rt, bit_pos, &done);
    b(label);
    bind(&done);
  } else {
    if (!label->IsBound()) {
      veneer_pool_.RegisterUnresolvedBranch(GetCursorOffset(),
                                            label,
                                            TestBranchType);
    }
    tbnz(rt, bit_pos, label);
  }
}


void MacroAssembler::Tbz(const Register& rt, unsigned bit_pos, Label* label) {
  // This is to avoid a situation where emitting a veneer for a TBZ/TBNZ
  // branch can become impossible because we emit the literal pool first.
  literal_pool_.CheckEmitForBranch(
      Instruction::GetImmBranchForwardRange(TestBranchType));
  VIXL_ASSERT(allow_macro_instructions_);
  VIXL_ASSERT(!rt.IsZero());
  EmissionCheckScope guard(this, 2 * kInstructionSize);

  if (label->IsBound() && LabelIsOutOfRange(label, TestBranchType)) {
    Label done;
    tbnz(rt, bit_pos, &done);
    b(label);
    bind(&done);
  } else {
    if (!label->IsBound()) {
      veneer_pool_.RegisterUnresolvedBranch(GetCursorOffset(),
                                            label,
                                            TestBranchType);
    }
    tbz(rt, bit_pos, label);
  }
}

void MacroAssembler::Bind(Label* label, BranchTargetIdentifier id) {
  VIXL_ASSERT(allow_macro_instructions_);
  veneer_pool_.DeleteUnresolvedBranchInfoForLabel(label);
  if (id == EmitBTI_none) {
    bind(label);
  } else {
    // Emit this inside an ExactAssemblyScope to ensure there are no extra
    // instructions between the bind and the target identifier instruction.
    ExactAssemblyScope scope(this, kInstructionSize);
    bind(label);
    if (id == EmitPACIASP) {
      paciasp();
    } else if (id == EmitPACIBSP) {
      pacibsp();
    } else {
      bti(id);
    }
  }
}

// Bind a label to a specified offset from the start of the buffer.
void MacroAssembler::BindToOffset(Label* label, ptrdiff_t offset) {
  VIXL_ASSERT(allow_macro_instructions_);
  veneer_pool_.DeleteUnresolvedBranchInfoForLabel(label);
  Assembler::BindToOffset(label, offset);
}


void MacroAssembler::And(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  LogicalMacro(rd, rn, operand, AND);
}


void MacroAssembler::Ands(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  LogicalMacro(rd, rn, operand, ANDS);
}


void MacroAssembler::Tst(const Register& rn, const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  Ands(AppropriateZeroRegFor(rn), rn, operand);
}


void MacroAssembler::Bic(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  LogicalMacro(rd, rn, operand, BIC);
}


void MacroAssembler::Bics(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  LogicalMacro(rd, rn, operand, BICS);
}


void MacroAssembler::Orr(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  LogicalMacro(rd, rn, operand, ORR);
}


void MacroAssembler::Orn(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  LogicalMacro(rd, rn, operand, ORN);
}


void MacroAssembler::Eor(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  LogicalMacro(rd, rn, operand, EOR);
}


void MacroAssembler::Eon(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  LogicalMacro(rd, rn, operand, EON);
}


void MacroAssembler::LogicalMacro(const Register& rd,
                                  const Register& rn,
                                  const Operand& operand,
                                  LogicalOp op) {
  // The worst case for size is logical immediate to sp:
  // * up to 4 instructions to materialise the constant
  // * 1 instruction to do the operation
  // * 1 instruction to move to sp
  MacroEmissionCheckScope guard(this);
  UseScratchRegisterScope temps(this);
  // Use `rd` as a temp, if we can.
  temps.Include(rd);
  // We read `rn` after evaluating `operand`.
  temps.Exclude(rn);
  // It doesn't matter if `operand` is in `temps` (e.g. because it aliases
  // `rd`) because we don't need it after it is evaluated.

  if (operand.IsImmediate()) {
    uint64_t immediate = operand.GetImmediate();
    unsigned reg_size = rd.GetSizeInBits();

    // If the operation is NOT, invert the operation and immediate.
    if ((op & NOT) == NOT) {
      op = static_cast<LogicalOp>(op & ~NOT);
      immediate = ~immediate;
    }
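    // (For example, Orn(rd, rn, #imm) is performed as Orr(rd, rn, #~imm),
    // since rd = rn | ~imm.)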

    // Ignore the top 32 bits of an immediate if we're moving to a W register.
    if (rd.Is32Bits()) {
      // Check that the top 32 bits are consistent.
      VIXL_ASSERT(((immediate >> kWRegSize) == 0) ||
                  ((immediate >> kWRegSize) == 0xffffffff));
      immediate &= kWRegMask;
    }

    VIXL_ASSERT(rd.Is64Bits() || IsUint32(immediate));

    // Special cases for all set or all clear immediates.
    if (immediate == 0) {
      switch (op) {
        case AND:
          Mov(rd, 0);
          return;
        case ORR:
          VIXL_FALLTHROUGH();
        case EOR:
          Mov(rd, rn);
          return;
        case ANDS:
          VIXL_FALLTHROUGH();
        case BICS:
          break;
        default:
          VIXL_UNREACHABLE();
      }
    } else if ((rd.Is64Bits() && (immediate == UINT64_C(0xffffffffffffffff))) ||
               (rd.Is32Bits() && (immediate == UINT64_C(0x00000000ffffffff)))) {
      switch (op) {
        case AND:
          Mov(rd, rn);
          return;
        case ORR:
          Mov(rd, immediate);
          return;
        case EOR:
          Mvn(rd, rn);
          return;
        case ANDS:
          VIXL_FALLTHROUGH();
        case BICS:
          break;
        default:
          VIXL_UNREACHABLE();
      }
    }

    unsigned n, imm_s, imm_r;
    if (IsImmLogical(immediate, reg_size, &n, &imm_s, &imm_r)) {
      // Immediate can be encoded in the instruction.
      LogicalImmediate(rd, rn, n, imm_s, imm_r, op);
    } else {
      // Immediate can't be encoded: synthesize using move immediate.
      Register temp = temps.AcquireSameSizeAs(rn);
      VIXL_ASSERT(!temp.Aliases(rn));

      // If the left-hand input is the stack pointer, we can't pre-shift the
      // immediate, as the encoding won't allow the subsequent post shift.
      PreShiftImmMode mode = rn.IsSP() ? kNoShift : kAnyShift;
      Operand imm_operand = MoveImmediateForShiftedOp(temp, immediate, mode);

      if (rd.Is(sp) || rd.Is(wsp)) {
        // If rd is the stack pointer we cannot use it as the destination
        // register so we use the temp register as an intermediate again.
        Logical(temp, rn, imm_operand, op);
        Mov(rd, temp);
      } else {
        Logical(rd, rn, imm_operand, op);
      }
    }
  } else if (operand.IsExtendedRegister()) {
    VIXL_ASSERT(operand.GetRegister().GetSizeInBits() <= rd.GetSizeInBits());
    // Add/sub extended supports shift <= 4. We want to support exactly the
    // same modes here.
    VIXL_ASSERT(operand.GetShiftAmount() <= 4);
    VIXL_ASSERT(
        operand.GetRegister().Is64Bits() ||
        ((operand.GetExtend() != UXTX) && (operand.GetExtend() != SXTX)));

    Register temp = temps.AcquireSameSizeAs(rn);
    VIXL_ASSERT(!temp.Aliases(rn));
    EmitExtendShift(temp,
                    operand.GetRegister(),
                    operand.GetExtend(),
                    operand.GetShiftAmount());
    Logical(rd, rn, Operand(temp), op);
  } else {
    // The operand can be encoded in the instruction.
    VIXL_ASSERT(operand.IsShiftedRegister());
    Logical(rd, rn, operand, op);
  }
}


void MacroAssembler::Mov(const Register& rd,
                         const Operand& operand,
                         DiscardMoveMode discard_mode) {
  VIXL_ASSERT(allow_macro_instructions_);
  // The worst case for size is mov immediate with up to 4 instructions.
  MacroEmissionCheckScope guard(this);

  if (operand.IsImmediate()) {
    // Call the macro assembler for generic immediates.
    Mov(rd, operand.GetImmediate());
  } else if (operand.IsShiftedRegister() && (operand.GetShiftAmount() != 0)) {
    // Emit a shift instruction if moving a shifted register. This operation
    // could also be achieved using an orr instruction (like orn used by Mvn),
    // but using a shift instruction makes the disassembly clearer.
    EmitShift(rd,
              operand.GetRegister(),
              operand.GetShift(),
              operand.GetShiftAmount());
  } else if (operand.IsExtendedRegister()) {
    // Emit an extend instruction if moving an extended register. This handles
    // extend with post-shift operations, too.
    EmitExtendShift(rd,
                    operand.GetRegister(),
                    operand.GetExtend(),
                    operand.GetShiftAmount());
  } else {
    Mov(rd, operand.GetRegister(), discard_mode);
  }
}


void MacroAssembler::Movi16bitHelper(const VRegister& vd, uint64_t imm) {
  VIXL_ASSERT(IsUint16(imm));
  int byte1 = (imm & 0xff);
  int byte2 = ((imm >> 8) & 0xff);
  if (byte1 == byte2) {
    movi(vd.Is64Bits() ? vd.V8B() : vd.V16B(), byte1);
  } else if (byte1 == 0) {
    movi(vd, byte2, LSL, 8);
  } else if (byte2 == 0) {
    movi(vd, byte1);
  } else if (byte1 == 0xff) {
    mvni(vd, ~byte2 & 0xff, LSL, 8);
  } else if (byte2 == 0xff) {
    mvni(vd, ~byte1 & 0xff);
  } else {
    UseScratchRegisterScope temps(this);
    Register temp = temps.AcquireW();
    movz(temp, imm);
    dup(vd, temp);
  }
}


void MacroAssembler::Movi32bitHelper(const VRegister& vd, uint64_t imm) {
  VIXL_ASSERT(IsUint32(imm));

  uint8_t bytes[sizeof(imm)];
  memcpy(bytes, &imm, sizeof(imm));

  // All bytes are either 0x00 or 0xff.
  {
    bool all0orff = true;
    for (int i = 0; i < 4; ++i) {
      if ((bytes[i] != 0) && (bytes[i] != 0xff)) {
        all0orff = false;
        break;
      }
    }

    if (all0orff == true) {
      movi(vd.Is64Bits() ? vd.V1D() : vd.V2D(), ((imm << 32) | imm));
      return;
    }
  }
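  // (Illustrative example: imm == 0x00ff00ff is emitted as a single
  // `movi vd.2d, #0x00ff00ff00ff00ff` using the byte-mask immediate form.)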

  // Of the 4 bytes, only one byte is non-zero.
  for (int i = 0; i < 4; i++) {
    if ((imm & (0xff << (i * 8))) == imm) {
      movi(vd, bytes[i], LSL, i * 8);
      return;
    }
  }

  // Of the 4 bytes, only one byte is not 0xff.
  for (int i = 0; i < 4; i++) {
    uint32_t mask = ~(0xff << (i * 8));
    if ((imm & mask) == mask) {
      mvni(vd, ~bytes[i] & 0xff, LSL, i * 8);
      return;
    }
  }

  // Immediate is of the form 0x00MMFFFF.
  if ((imm & 0xff00ffff) == 0x0000ffff) {
    movi(vd, bytes[2], MSL, 16);
    return;
  }
1001*f5c631daSSadaf Ebrahimi
1002*f5c631daSSadaf Ebrahimi // Immediate is of the form 0x0000MMFF.
1003*f5c631daSSadaf Ebrahimi if ((imm & 0xffff00ff) == 0x000000ff) {
1004*f5c631daSSadaf Ebrahimi movi(vd, bytes[1], MSL, 8);
1005*f5c631daSSadaf Ebrahimi return;
1006*f5c631daSSadaf Ebrahimi }
1007*f5c631daSSadaf Ebrahimi
1008*f5c631daSSadaf Ebrahimi // Immediate is of the form 0xFFMM0000.
1009*f5c631daSSadaf Ebrahimi if ((imm & 0xff00ffff) == 0xff000000) {
1010*f5c631daSSadaf Ebrahimi mvni(vd, ~bytes[2] & 0xff, MSL, 16);
1011*f5c631daSSadaf Ebrahimi return;
1012*f5c631daSSadaf Ebrahimi }
1013*f5c631daSSadaf Ebrahimi // Immediate is of the form 0xFFFFMM00.
1014*f5c631daSSadaf Ebrahimi if ((imm & 0xffff00ff) == 0xffff0000) {
1015*f5c631daSSadaf Ebrahimi mvni(vd, ~bytes[1] & 0xff, MSL, 8);
1016*f5c631daSSadaf Ebrahimi return;
1017*f5c631daSSadaf Ebrahimi }
1018*f5c631daSSadaf Ebrahimi
1019*f5c631daSSadaf Ebrahimi // Top and bottom 16-bits are equal.
1020*f5c631daSSadaf Ebrahimi if (((imm >> 16) & 0xffff) == (imm & 0xffff)) {
1021*f5c631daSSadaf Ebrahimi Movi16bitHelper(vd.Is64Bits() ? vd.V4H() : vd.V8H(), imm & 0xffff);
1022*f5c631daSSadaf Ebrahimi return;
1023*f5c631daSSadaf Ebrahimi }
1024*f5c631daSSadaf Ebrahimi
1025*f5c631daSSadaf Ebrahimi // Default case.
1026*f5c631daSSadaf Ebrahimi {
1027*f5c631daSSadaf Ebrahimi UseScratchRegisterScope temps(this);
1028*f5c631daSSadaf Ebrahimi Register temp = temps.AcquireW();
1029*f5c631daSSadaf Ebrahimi Mov(temp, imm);
1030*f5c631daSSadaf Ebrahimi dup(vd, temp);
1031*f5c631daSSadaf Ebrahimi }
1032*f5c631daSSadaf Ebrahimi }
1033*f5c631daSSadaf Ebrahimi
1034*f5c631daSSadaf Ebrahimi
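// For 64-bit lanes, movi's byte-mask form can encode any value whose eight
// bytes are each 0x00 or 0xff (e.g. 0x00ff00ffff0000ff) in one instruction.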
void MacroAssembler::Movi64bitHelper(const VRegister& vd, uint64_t imm) {
  // All bytes are either 0x00 or 0xff.
  {
    bool all0orff = true;
    for (int i = 0; i < 8; ++i) {
      int byteval = (imm >> (i * 8)) & 0xff;
      if (byteval != 0 && byteval != 0xff) {
        all0orff = false;
        break;
      }
    }
    if (all0orff) {
      movi(vd, imm);
      return;
    }
  }

  // Top and bottom 32 bits are equal.
  if (((imm >> 32) & 0xffffffff) == (imm & 0xffffffff)) {
    Movi32bitHelper(vd.Is64Bits() ? vd.V2S() : vd.V4S(), imm & 0xffffffff);
    return;
  }

  // Default case.
  {
    UseScratchRegisterScope temps(this);
    Register temp = temps.AcquireX();
    Mov(temp, imm);
    if (vd.Is1D()) {
      mov(vd.D(), 0, temp);
    } else {
      dup(vd.V2D(), temp);
    }
  }
}


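// Movi dispatches on lane size, e.g. Movi(v0.V16B(), 0xab) is a single movi,
// while Movi(v0.V4S(), 0x12345678) goes through Movi32bitHelper.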
void MacroAssembler::Movi(const VRegister& vd,
                          uint64_t imm,
                          Shift shift,
                          int shift_amount) {
  VIXL_ASSERT(allow_macro_instructions_);
  MacroEmissionCheckScope guard(this);
  if (shift_amount != 0 || shift != LSL) {
    movi(vd, imm, shift, shift_amount);
  } else if (vd.Is8B() || vd.Is16B()) {
    // 8-bit immediate.
    VIXL_ASSERT(IsUint8(imm));
    movi(vd, imm);
  } else if (vd.Is4H() || vd.Is8H()) {
    // 16-bit immediate.
    Movi16bitHelper(vd, imm);
  } else if (vd.Is2S() || vd.Is4S()) {
    // 32-bit immediate.
    Movi32bitHelper(vd, imm);
  } else {
    // 64-bit immediate.
    Movi64bitHelper(vd, imm);
  }
}


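// 128-bit form: the low 64 bits are replicated into both lanes first, so
// equal halves cost nothing extra; distinct halves add a Mov of `hi` into a
// scratch X register and an ins into lane 1.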
void MacroAssembler::Movi(const VRegister& vd, uint64_t hi, uint64_t lo) {
  // TODO: Move 128-bit values in a more efficient way.
  VIXL_ASSERT(vd.Is128Bits());
  Movi(vd.V2D(), lo);
  if (hi != lo) {
    UseScratchRegisterScope temps(this);
    // TODO: Figure out if using a temporary V register to materialise the
    // immediate is better.
    Register temp = temps.AcquireX();
    Mov(temp, hi);
    Ins(vd.V2D(), 1, temp);
  }
}


void MacroAssembler::Mvn(const Register& rd, const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  // The worst case for size is mvn immediate with up to 4 instructions.
  MacroEmissionCheckScope guard(this);

  if (operand.IsImmediate()) {
    // Call the macro assembler for generic immediates.
    Mvn(rd, operand.GetImmediate());
  } else if (operand.IsExtendedRegister()) {
    // Emit two instructions for the extend case. This differs from Mov, as
    // the extend and invert can't be achieved in one instruction.
    EmitExtendShift(rd,
                    operand.GetRegister(),
                    operand.GetExtend(),
                    operand.GetShiftAmount());
    mvn(rd, rd);
  } else {
    // Otherwise, register and shifted register cases can be handled by the
    // assembler directly, using orn.
    mvn(rd, operand);
  }
}


void MacroAssembler::Mov(const Register& rd, uint64_t imm) {
  VIXL_ASSERT(allow_macro_instructions_);
  MoveImmediateHelper(this, rd, imm);
}


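// Negative immediates are flipped to the complementary instruction, e.g.
// Ccmp(x0, -12, NoFlag, eq) is emitted as ccmn x0, #12, #0, eq, since the
// ccmp encoding only takes a 5-bit unsigned immediate.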
void MacroAssembler::Ccmp(const Register& rn,
                          const Operand& operand,
                          StatusFlags nzcv,
                          Condition cond) {
  VIXL_ASSERT(allow_macro_instructions_);
  if (operand.IsImmediate() && (operand.GetImmediate() < 0)) {
    ConditionalCompareMacro(rn, -operand.GetImmediate(), nzcv, cond, CCMN);
  } else {
    ConditionalCompareMacro(rn, operand, nzcv, cond, CCMP);
  }
}


void MacroAssembler::Ccmn(const Register& rn,
                          const Operand& operand,
                          StatusFlags nzcv,
                          Condition cond) {
  VIXL_ASSERT(allow_macro_instructions_);
  if (operand.IsImmediate() && (operand.GetImmediate() < 0)) {
    ConditionalCompareMacro(rn, -operand.GetImmediate(), nzcv, cond, CCMP);
  } else {
    ConditionalCompareMacro(rn, operand, nzcv, cond, CCMN);
  }
}


void MacroAssembler::ConditionalCompareMacro(const Register& rn,
                                             const Operand& operand,
                                             StatusFlags nzcv,
                                             Condition cond,
                                             ConditionalCompareOp op) {
  VIXL_ASSERT((cond != al) && (cond != nv));
  // The worst case for size is ccmp immediate:
  // * up to 4 instructions to materialise the constant
  // * 1 instruction for ccmp
  MacroEmissionCheckScope guard(this);

  if ((operand.IsShiftedRegister() && (operand.GetShiftAmount() == 0)) ||
      (operand.IsImmediate() &&
       IsImmConditionalCompare(operand.GetImmediate()))) {
    // The immediate can be encoded in the instruction, or the operand is an
    // unshifted register: call the assembler.
    ConditionalCompare(rn, operand, nzcv, cond, op);
  } else {
    UseScratchRegisterScope temps(this);
    // The operand isn't directly supported by the instruction: perform the
    // operation on a temporary register.
    Register temp = temps.AcquireSameSizeAs(rn);
    Mov(temp, operand);
    ConditionalCompare(rn, temp, nzcv, cond, op);
  }
}


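// Selection strategy, cheapest first: two foldable immediates (e.g. 1/0 ->
// cset, -1/0 -> csetm), then one operand in {-1, 0, 1} (csinv/csel/csinc
// against the zero register), then a plain csel on synthesised registers.
// Passing masm == NULL only queries which operands would need synthesising.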
void MacroAssembler::CselHelper(MacroAssembler* masm,
                                const Register& rd,
                                Operand left,
                                Operand right,
                                Condition cond,
                                bool* should_synthesise_left,
                                bool* should_synthesise_right) {
  bool emit_code = (masm != NULL);

  VIXL_ASSERT(!emit_code || masm->allow_macro_instructions_);
  VIXL_ASSERT((cond != al) && (cond != nv));
  VIXL_ASSERT(!rd.IsZero() && !rd.IsSP());
  VIXL_ASSERT(left.IsImmediate() || !left.GetRegister().IsSP());
  VIXL_ASSERT(right.IsImmediate() || !right.GetRegister().IsSP());

  if (should_synthesise_left != NULL) *should_synthesise_left = false;
  if (should_synthesise_right != NULL) *should_synthesise_right = false;

  // The worst case for size occurs when the inputs are two non-encodable
  // constants:
  // * up to 4 instructions to materialise the left constant
  // * up to 4 instructions to materialise the right constant
  // * 1 instruction for csel
  EmissionCheckScope guard(masm, 9 * kInstructionSize);
  UseScratchRegisterScope temps;
  if (masm != NULL) {
    temps.Open(masm);
  }

  // Try to handle cases where both inputs are immediates.
  bool left_is_immediate = left.IsImmediate() || left.IsZero();
  bool right_is_immediate = right.IsImmediate() || right.IsZero();
  if (left_is_immediate && right_is_immediate &&
      CselSubHelperTwoImmediates(masm,
                                 rd,
                                 left.GetEquivalentImmediate(),
                                 right.GetEquivalentImmediate(),
                                 cond,
                                 should_synthesise_left,
                                 should_synthesise_right)) {
    return;
  }

  // Handle cases where one of the two inputs is -1, 0, or 1.
  bool left_is_small_immediate =
      left_is_immediate && ((-1 <= left.GetEquivalentImmediate()) &&
                            (left.GetEquivalentImmediate() <= 1));
  bool right_is_small_immediate =
      right_is_immediate && ((-1 <= right.GetEquivalentImmediate()) &&
                             (right.GetEquivalentImmediate() <= 1));
  if (right_is_small_immediate || left_is_small_immediate) {
    bool swapped_inputs = false;
    if (!right_is_small_immediate) {
      std::swap(left, right);
      cond = InvertCondition(cond);
      swapped_inputs = true;
    }
    CselSubHelperRightSmallImmediate(masm,
                                     &temps,
                                     rd,
                                     left,
                                     right,
                                     cond,
                                     swapped_inputs ? should_synthesise_right
                                                    : should_synthesise_left);
    return;
  }

  // Otherwise both inputs need to be available in registers. Synthesise them
  // if necessary and emit the `csel`.
  if (!left.IsPlainRegister()) {
    if (emit_code) {
      Register temp = temps.AcquireSameSizeAs(rd);
      masm->Mov(temp, left);
      left = temp;
    }
    if (should_synthesise_left != NULL) *should_synthesise_left = true;
  }
  if (!right.IsPlainRegister()) {
    if (emit_code) {
      Register temp = temps.AcquireSameSizeAs(rd);
      masm->Mov(temp, right);
      right = temp;
    }
    if (should_synthesise_right != NULL) *should_synthesise_right = true;
  }
  if (emit_code) {
    VIXL_ASSERT(left.IsPlainRegister() && right.IsPlainRegister());
    if (left.GetRegister().Is(right.GetRegister())) {
      masm->Mov(rd, left.GetRegister());
    } else {
      masm->csel(rd, left.GetRegister(), right.GetRegister(), cond);
    }
  }
}


bool MacroAssembler::CselSubHelperTwoImmediates(MacroAssembler* masm,
                                                const Register& rd,
                                                int64_t left,
                                                int64_t right,
                                                Condition cond,
                                                bool* should_synthesise_left,
                                                bool* should_synthesise_right) {
  bool emit_code = (masm != NULL);
  if (should_synthesise_left != NULL) *should_synthesise_left = false;
  if (should_synthesise_right != NULL) *should_synthesise_right = false;

  if (left == right) {
    if (emit_code) masm->Mov(rd, left);
    return true;
  } else if (left == -right) {
    if (should_synthesise_right != NULL) *should_synthesise_right = true;
    if (emit_code) {
      masm->Mov(rd, right);
      masm->Cneg(rd, rd, cond);
    }
    return true;
  }

  if (CselSubHelperTwoOrderedImmediates(masm, rd, left, right, cond)) {
    return true;
  } else {
    std::swap(left, right);
    if (CselSubHelperTwoOrderedImmediates(masm,
                                          rd,
                                          left,
                                          right,
                                          InvertCondition(cond))) {
      return true;
    }
  }

  // TODO: Handle more situations. For example handle `csel rd, #5, #6, cond`
  // with `cinc`.
  return false;
}


bool MacroAssembler::CselSubHelperTwoOrderedImmediates(MacroAssembler* masm,
                                                       const Register& rd,
                                                       int64_t left,
                                                       int64_t right,
                                                       Condition cond) {
  bool emit_code = (masm != NULL);

  if ((left == 1) && (right == 0)) {
    if (emit_code) masm->cset(rd, cond);
    return true;
  } else if ((left == -1) && (right == 0)) {
    if (emit_code) masm->csetm(rd, cond);
    return true;
  }
  return false;
}


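// With `right` in {-1, 0, 1}, a single conditional instruction against the
// zero register suffices: e.g. right == 1 emits csinc rd, left, zr, cond,
// i.e. rd = cond ? left : zr + 1.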
void MacroAssembler::CselSubHelperRightSmallImmediate(
    MacroAssembler* masm,
    UseScratchRegisterScope* temps,
    const Register& rd,
    const Operand& left,
    const Operand& right,
    Condition cond,
    bool* should_synthesise_left) {
  bool emit_code = (masm != NULL);
  VIXL_ASSERT((right.IsImmediate() || right.IsZero()) &&
              (-1 <= right.GetEquivalentImmediate()) &&
              (right.GetEquivalentImmediate() <= 1));
  Register left_register;

  if (left.IsPlainRegister()) {
    left_register = left.GetRegister();
  } else {
    if (emit_code) {
      left_register = temps->AcquireSameSizeAs(rd);
      masm->Mov(left_register, left);
    }
    if (should_synthesise_left != NULL) *should_synthesise_left = true;
  }
  if (emit_code) {
    int64_t imm = right.GetEquivalentImmediate();
    Register zr = AppropriateZeroRegFor(rd);
    if (imm == 0) {
      masm->csel(rd, left_register, zr, cond);
    } else if (imm == 1) {
      masm->csinc(rd, left_register, zr, cond);
    } else {
      VIXL_ASSERT(imm == -1);
      masm->csinv(rd, left_register, zr, cond);
    }
  }
}


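// Negative add/sub immediates are canonicalised to the opposite operation,
// e.g. Add(x0, x1, -8) becomes sub x0, x1, #8; INT64_MIN is excluded because
// it has no positive counterpart to negate to.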
void MacroAssembler::Add(const Register& rd,
                         const Register& rn,
                         const Operand& operand,
                         FlagsUpdate S) {
  VIXL_ASSERT(allow_macro_instructions_);
  if (operand.IsImmediate()) {
    int64_t imm = operand.GetImmediate();
    if ((imm < 0) && (imm != std::numeric_limits<int64_t>::min()) &&
        IsImmAddSub(-imm)) {
      AddSubMacro(rd, rn, -imm, S, SUB);
      return;
    }
  }
  AddSubMacro(rd, rn, operand, S, ADD);
}


void MacroAssembler::Adds(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  Add(rd, rn, operand, SetFlags);
}


void MacroAssembler::Sub(const Register& rd,
                         const Register& rn,
                         const Operand& operand,
                         FlagsUpdate S) {
  VIXL_ASSERT(allow_macro_instructions_);
  if (operand.IsImmediate()) {
    int64_t imm = operand.GetImmediate();
    if ((imm < 0) && (imm != std::numeric_limits<int64_t>::min()) &&
        IsImmAddSub(-imm)) {
      AddSubMacro(rd, rn, -imm, S, ADD);
      return;
    }
  }
  AddSubMacro(rd, rn, operand, S, SUB);
}


void MacroAssembler::Subs(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  Sub(rd, rn, operand, SetFlags);
}


void MacroAssembler::Cmn(const Register& rn, const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  Adds(AppropriateZeroRegFor(rn), rn, operand);
}


void MacroAssembler::Cmp(const Register& rn, const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  Subs(AppropriateZeroRegFor(rn), rn, operand);
}


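// Only comparisons against 0.0 have a dedicated fcmp form; any other value is
// first materialised, e.g. Fcmp(d0, 1.5) emits an fmov into a scratch D
// register followed by an fcmp of d0 against it.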
void MacroAssembler::Fcmp(const VRegister& fn, double value, FPTrapFlags trap) {
  VIXL_ASSERT(allow_macro_instructions_);
  // The worst case for size is:
  // * 1 instruction to materialise the constant, using the literal pool if
  //   necessary
  // * 1 instruction for fcmp{e}
  MacroEmissionCheckScope guard(this);
  if (value != 0.0) {
    UseScratchRegisterScope temps(this);
    VRegister tmp = temps.AcquireSameSizeAs(fn);
    Fmov(tmp, value);
    FPCompareMacro(fn, tmp, trap);
  } else {
    FPCompareMacro(fn, value, trap);
  }
}


void MacroAssembler::Fcmpe(const VRegister& fn, double value) {
  Fcmp(fn, value, EnableTrap);
}


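// fmov can encode only the 8-bit "FP immediate" set (values such as +/-1.0,
// +/-0.5, +/-31.0); 0.0 is moved from the zero register, other scalar values
// are loaded from the literal pool, and vector forms are synthesised from the
// raw bits with Movi.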
void MacroAssembler::Fmov(VRegister vd, double imm) {
  VIXL_ASSERT(allow_macro_instructions_);
  // Floating point immediates are loaded through the literal pool.
  MacroEmissionCheckScope guard(this);

  if (vd.Is1H() || vd.Is4H() || vd.Is8H()) {
    Fmov(vd, Float16(imm));
    return;
  }

  if (vd.Is1S() || vd.Is2S() || vd.Is4S()) {
    Fmov(vd, static_cast<float>(imm));
    return;
  }

  VIXL_ASSERT(vd.Is1D() || vd.Is2D());
  if (IsImmFP64(imm)) {
    fmov(vd, imm);
  } else {
    uint64_t rawbits = DoubleToRawbits(imm);
    if (vd.IsScalar()) {
      if (rawbits == 0) {
        fmov(vd, xzr);
      } else {
        ldr(vd,
            new Literal<double>(imm,
                                &literal_pool_,
                                RawLiteral::kDeletedOnPlacementByPool));
      }
    } else {
      // TODO: consider NEON support for load literal.
      Movi(vd, rawbits);
    }
  }
}


void MacroAssembler::Fmov(VRegister vd, float imm) {
  VIXL_ASSERT(allow_macro_instructions_);
  // Floating point immediates are loaded through the literal pool.
  MacroEmissionCheckScope guard(this);

  if (vd.Is1H() || vd.Is4H() || vd.Is8H()) {
    Fmov(vd, Float16(imm));
    return;
  }

  if (vd.Is1D() || vd.Is2D()) {
    Fmov(vd, static_cast<double>(imm));
    return;
  }

  VIXL_ASSERT(vd.Is1S() || vd.Is2S() || vd.Is4S());
  if (IsImmFP32(imm)) {
    fmov(vd, imm);
  } else {
    uint32_t rawbits = FloatToRawbits(imm);
    if (vd.IsScalar()) {
      if (rawbits == 0) {
        fmov(vd, wzr);
      } else {
        ldr(vd,
            new Literal<float>(imm,
                               &literal_pool_,
                               RawLiteral::kDeletedOnPlacementByPool));
      }
    } else {
      // TODO: consider NEON support for load literal.
      Movi(vd, rawbits);
    }
  }
}


void MacroAssembler::Fmov(VRegister vd, Float16 imm) {
  VIXL_ASSERT(allow_macro_instructions_);
  MacroEmissionCheckScope guard(this);

  if (vd.Is1S() || vd.Is2S() || vd.Is4S()) {
    Fmov(vd, FPToFloat(imm, kIgnoreDefaultNaN));
    return;
  }

  if (vd.Is1D() || vd.Is2D()) {
    Fmov(vd, FPToDouble(imm, kIgnoreDefaultNaN));
    return;
  }

  VIXL_ASSERT(vd.Is1H() || vd.Is4H() || vd.Is8H());
  uint16_t rawbits = Float16ToRawbits(imm);
  if (IsImmFP16(imm)) {
    fmov(vd, imm);
  } else {
    if (vd.IsScalar()) {
      if (rawbits == 0x0) {
        fmov(vd, wzr);
      } else {
        // We can use movz instead of the literal pool.
        UseScratchRegisterScope temps(this);
        Register temp = temps.AcquireW();
        Mov(temp, rawbits);
        Fmov(vd, temp);
      }
    } else {
      // TODO: consider NEON support for load literal.
      Movi(vd, static_cast<uint64_t>(rawbits));
    }
  }
}


void MacroAssembler::Neg(const Register& rd, const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  if (operand.IsImmediate()) {
    Mov(rd, -operand.GetImmediate());
  } else {
    Sub(rd, AppropriateZeroRegFor(rd), operand);
  }
}


void MacroAssembler::Negs(const Register& rd, const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  Subs(rd, AppropriateZeroRegFor(rd), operand);
}


bool MacroAssembler::TryOneInstrMoveImmediate(const Register& dst,
                                              uint64_t imm) {
  return OneInstrMoveImmediateHelper(this, dst, imm);
}


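// Worked example (values illustrative): imm == 0x1234000 fits no single
// movz/movn/orr encoding, but imm >> 14 == 0x48d fits movz, so the helper
// emits movz dst, #0x48d and returns Operand(dst, LSL, 14) for the caller to
// fold into its add/sub.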
Operand MacroAssembler::MoveImmediateForShiftedOp(const Register& dst,
                                                  uint64_t imm,
                                                  PreShiftImmMode mode) {
  int reg_size = dst.GetSizeInBits();

  // Encode the immediate in a single move instruction, if possible.
  if (TryOneInstrMoveImmediate(dst, imm)) {
    // The move was successful; nothing to do here.
  } else {
    // Pre-shift the immediate to the least-significant bits of the register.
    int shift_low = CountTrailingZeros(imm, reg_size);
    if (mode == kLimitShiftForSP) {
      // When applied to the stack pointer, the subsequent arithmetic operation
      // can use the extend form to shift left by a maximum of four bits. Right
      // shifts are not allowed, so we filter them out later before the new
      // immediate is tested.
      shift_low = std::min(shift_low, 4);
    }
    // TryOneInstrMoveImmediate handles `imm` with a value of zero, so
    // shift_low must lie in the range [0, 63], and the shifts below are
    // well-defined.
    VIXL_ASSERT((shift_low >= 0) && (shift_low < 64));
    // imm_low = imm >> shift_low (with sign extension)
    uint64_t imm_low = ExtractSignedBitfield64(63, shift_low, imm);

    // Pre-shift the immediate to the most-significant bits of the register,
    // inserting set bits in the least-significant bits.
    int shift_high = CountLeadingZeros(imm, reg_size);
    VIXL_ASSERT((shift_high >= 0) && (shift_high < 64));
    uint64_t imm_high = (imm << shift_high) | GetUintMask(shift_high);

    if ((mode != kNoShift) && TryOneInstrMoveImmediate(dst, imm_low)) {
      // The new immediate has been moved into the destination's low bits:
      // return a new leftward-shifting operand.
      return Operand(dst, LSL, shift_low);
    } else if ((mode == kAnyShift) && TryOneInstrMoveImmediate(dst, imm_high)) {
      // The new immediate has been moved into the destination's high bits:
      // return a new rightward-shifting operand.
      return Operand(dst, LSR, shift_high);
    } else {
      Mov(dst, imm);
    }
  }
  return Operand(dst);
}


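// Moves between any mix of registers and stack slots. A memory-to-memory move
// stages the value through a scratch register of the operand size, e.g. a
// 32-bit slot-to-slot copy becomes an ldr/str pair through a scratch W
// register.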
void MacroAssembler::Move(const GenericOperand& dst,
                          const GenericOperand& src) {
  if (dst.Equals(src)) {
    return;
  }

  VIXL_ASSERT(dst.IsValid() && src.IsValid());

  // The sizes of the operands must match exactly.
  VIXL_ASSERT(dst.GetSizeInBits() == src.GetSizeInBits());
  VIXL_ASSERT(dst.GetSizeInBits() <= kXRegSize);
  int operand_size = static_cast<int>(dst.GetSizeInBits());

  if (dst.IsCPURegister() && src.IsCPURegister()) {
    CPURegister dst_reg = dst.GetCPURegister();
    CPURegister src_reg = src.GetCPURegister();
    if (dst_reg.IsRegister() && src_reg.IsRegister()) {
      Mov(Register(dst_reg), Register(src_reg));
    } else if (dst_reg.IsVRegister() && src_reg.IsVRegister()) {
      Fmov(VRegister(dst_reg), VRegister(src_reg));
    } else {
      if (dst_reg.IsRegister()) {
        Fmov(Register(dst_reg), VRegister(src_reg));
      } else {
        Fmov(VRegister(dst_reg), Register(src_reg));
      }
    }
    return;
  }

  if (dst.IsMemOperand() && src.IsMemOperand()) {
    UseScratchRegisterScope temps(this);
    CPURegister temp = temps.AcquireCPURegisterOfSize(operand_size);
    Ldr(temp, src.GetMemOperand());
    Str(temp, dst.GetMemOperand());
    return;
  }

  if (dst.IsCPURegister()) {
    Ldr(dst.GetCPURegister(), src.GetMemOperand());
  } else {
    Str(src.GetCPURegister(), dst.GetMemOperand());
  }
}


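// Materialises an address without performing the access, e.g.
// ComputeAddress(x0, MemOperand(x1, w2, SXTW, 2)) emits
// add x0, x1, w2, sxtw #2.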
void MacroAssembler::ComputeAddress(const Register& dst,
                                    const MemOperand& mem_op) {
  // We cannot handle pre-indexing or post-indexing.
  VIXL_ASSERT(mem_op.GetAddrMode() == Offset);
  Register base = mem_op.GetBaseRegister();
  if (mem_op.IsImmediateOffset()) {
    Add(dst, base, mem_op.GetOffset());
  } else {
    VIXL_ASSERT(mem_op.IsRegisterOffset());
    Register reg_offset = mem_op.GetRegisterOffset();
    Shift shift = mem_op.GetShift();
    Extend extend = mem_op.GetExtend();
    if (shift == NO_SHIFT) {
      VIXL_ASSERT(extend != NO_EXTEND);
      Add(dst, base, Operand(reg_offset, extend, mem_op.GetShiftAmount()));
    } else {
      VIXL_ASSERT(extend == NO_EXTEND);
      Add(dst, base, Operand(reg_offset, shift, mem_op.GetShiftAmount()));
    }
  }
}


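// Operands that add/sub cannot encode are staged through a scratch register,
// e.g. Add(x0, x1, Operand(x2, ROR, 4)): the shifted-register form of add
// accepts only LSL/LSR/ASR, so the rotation is computed first.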
void MacroAssembler::AddSubMacro(const Register& rd,
                                 const Register& rn,
                                 const Operand& operand,
                                 FlagsUpdate S,
                                 AddSubOp op) {
  // Worst case is add/sub immediate:
  // * up to 4 instructions to materialise the constant
  // * 1 instruction for add/sub
  MacroEmissionCheckScope guard(this);

  if (operand.IsZero() && rd.Is(rn) && rd.Is64Bits() && rn.Is64Bits() &&
      (S == LeaveFlags)) {
    // The instruction would be a nop. Avoid generating useless code.
    return;
  }

  if ((operand.IsImmediate() && !IsImmAddSub(operand.GetImmediate())) ||
      (rn.IsZero() && !operand.IsShiftedRegister()) ||
      (operand.IsShiftedRegister() && (operand.GetShift() == ROR))) {
    UseScratchRegisterScope temps(this);
    // Use `rd` as a temp, if we can.
    temps.Include(rd);
    // We read `rn` after evaluating `operand`.
    temps.Exclude(rn);
    // It doesn't matter if `operand` is in `temps` (e.g. because it aliases
    // `rd`) because we don't need it after it is evaluated.
    Register temp = temps.AcquireSameSizeAs(rn);
    if (operand.IsImmediate()) {
      PreShiftImmMode mode = kAnyShift;

      // If the destination or source register is the stack pointer, we can
      // only pre-shift the immediate right by values supported in the add/sub
      // extend encoding.
      if (rd.IsSP()) {
        // If the destination is SP and flags will be set, we can't pre-shift
        // the immediate at all.
        mode = (S == SetFlags) ? kNoShift : kLimitShiftForSP;
      } else if (rn.IsSP()) {
        mode = kLimitShiftForSP;
      }

      Operand imm_operand =
          MoveImmediateForShiftedOp(temp, operand.GetImmediate(), mode);
      AddSub(rd, rn, imm_operand, S, op);
    } else {
      Mov(temp, operand);
      AddSub(rd, rn, temp, S, op);
    }
  } else {
    AddSub(rd, rn, operand, S, op);
  }
}


void MacroAssembler::Adc(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  AddSubWithCarryMacro(rd, rn, operand, LeaveFlags, ADC);
}


void MacroAssembler::Adcs(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  AddSubWithCarryMacro(rd, rn, operand, SetFlags, ADC);
}


void MacroAssembler::Sbc(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  AddSubWithCarryMacro(rd, rn, operand, LeaveFlags, SBC);
}


void MacroAssembler::Sbcs(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  AddSubWithCarryMacro(rd, rn, operand, SetFlags, SBC);
}


void MacroAssembler::Ngc(const Register& rd, const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  Register zr = AppropriateZeroRegFor(rd);
  Sbc(rd, zr, operand);
}


void MacroAssembler::Ngcs(const Register& rd, const Operand& operand) {
  VIXL_ASSERT(allow_macro_instructions_);
  Register zr = AppropriateZeroRegFor(rd);
  Sbcs(rd, zr, operand);
}


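// adc/sbc have no immediate, shifted or extended forms, so every such operand
// is first brought into a scratch register; e.g. Adc(x0, x1, Operand(x2, LSL,
// 4)) costs an extra shift instruction before the adc itself.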
void MacroAssembler::AddSubWithCarryMacro(const Register& rd,
                                          const Register& rn,
                                          const Operand& operand,
                                          FlagsUpdate S,
                                          AddSubWithCarryOp op) {
  VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
  // Worst case is addc/subc immediate:
  // * up to 4 instructions to materialise the constant
  // * 1 instruction for add/sub
  MacroEmissionCheckScope guard(this);
  UseScratchRegisterScope temps(this);
  // Use `rd` as a temp, if we can.
  temps.Include(rd);
  // We read `rn` after evaluating `operand`.
  temps.Exclude(rn);
  // It doesn't matter if `operand` is in `temps` (e.g. because it aliases
  // `rd`) because we don't need it after it is evaluated.

  if (operand.IsImmediate() ||
      (operand.IsShiftedRegister() && (operand.GetShift() == ROR))) {
    // Add/sub with carry (immediate or ROR shifted register).
    Register temp = temps.AcquireSameSizeAs(rn);
    Mov(temp, operand);
    AddSubWithCarry(rd, rn, Operand(temp), S, op);
  } else if (operand.IsShiftedRegister() && (operand.GetShiftAmount() != 0)) {
    // Add/sub with carry (shifted register).
    VIXL_ASSERT(operand.GetRegister().GetSizeInBits() == rd.GetSizeInBits());
    VIXL_ASSERT(operand.GetShift() != ROR);
    VIXL_ASSERT(
        IsUintN(rd.GetSizeInBits() == kXRegSize ? kXRegSizeLog2 : kWRegSizeLog2,
                operand.GetShiftAmount()));
    Register temp = temps.AcquireSameSizeAs(rn);
    EmitShift(temp,
              operand.GetRegister(),
              operand.GetShift(),
              operand.GetShiftAmount());
    AddSubWithCarry(rd, rn, Operand(temp), S, op);
  } else if (operand.IsExtendedRegister()) {
    // Add/sub with carry (extended register).
    VIXL_ASSERT(operand.GetRegister().GetSizeInBits() <= rd.GetSizeInBits());
    // Add/sub extended supports a shift <= 4. We want to support exactly the
    // same modes.
    VIXL_ASSERT(operand.GetShiftAmount() <= 4);
    VIXL_ASSERT(
        operand.GetRegister().Is64Bits() ||
        ((operand.GetExtend() != UXTX) && (operand.GetExtend() != SXTX)));
    Register temp = temps.AcquireSameSizeAs(rn);
    EmitExtendShift(temp,
                    operand.GetRegister(),
                    operand.GetExtend(),
                    operand.GetShiftAmount());
    AddSubWithCarry(rd, rn, Operand(temp), S, op);
  } else {
    // The addressing mode is directly supported by the instruction.
    AddSubWithCarry(rd, rn, operand, S, op);
  }
}


void MacroAssembler::Rmif(const Register& xn,
                          unsigned shift,
                          StatusFlags flags) {
  VIXL_ASSERT(allow_macro_instructions_);
  SingleEmissionCheckScope guard(this);
  rmif(xn, shift, flags);
}


void MacroAssembler::Setf8(const Register& wn) {
  VIXL_ASSERT(allow_macro_instructions_);
  SingleEmissionCheckScope guard(this);
  setf8(wn);
}


void MacroAssembler::Setf16(const Register& wn) {
  VIXL_ASSERT(allow_macro_instructions_);
  SingleEmissionCheckScope guard(this);
  setf16(wn);
}


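// Expands the load/store macro list (Ldr, Str, Ldrb, Strb, Ldrh, Strh, ...)
// into thin wrappers that forward to LoadStoreMacro below.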
#define DEFINE_FUNCTION(FN, REGTYPE, REG, OP)                          \
  void MacroAssembler::FN(const REGTYPE REG, const MemOperand& addr) { \
    VIXL_ASSERT(allow_macro_instructions_);                            \
    LoadStoreMacro(REG, addr, OP);                                     \
  }
LS_MACRO_LIST(DEFINE_FUNCTION)
#undef DEFINE_FUNCTION


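// Offset handling, e.g.: Ldr(x0, MemOperand(x1, 1)) is not scaled-encodable
// for an 8-byte access but fits the unscaled (ldur) form, so it stays a
// single instruction; Ldr(x0, MemOperand(x1, 0x12345)) fits neither, so the
// offset is materialised in a scratch register and a register-offset load is
// used.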
void MacroAssembler::LoadStoreMacro(const CPURegister& rt,
                                    const MemOperand& addr,
                                    LoadStoreOp op) {
  VIXL_ASSERT(addr.IsImmediateOffset() || addr.IsImmediatePostIndex() ||
              addr.IsImmediatePreIndex() || addr.IsRegisterOffset());

  // Worst case is ldr/str pre/post index:
  // * 1 instruction for ldr/str
  // * up to 4 instructions to materialise the constant
  // * 1 instruction to update the base
  MacroEmissionCheckScope guard(this);

  int64_t offset = addr.GetOffset();
  unsigned access_size = CalcLSDataSize(op);

  // Check if an immediate offset fits in the immediate field of the
  // appropriate instruction. If not, emit two instructions to perform
  // the operation.
  if (addr.IsImmediateOffset() && !IsImmLSScaled(offset, access_size) &&
      !IsImmLSUnscaled(offset)) {
    // Immediate offset that can't be encoded using unsigned or unscaled
    // addressing modes.
    UseScratchRegisterScope temps(this);
    Register temp = temps.AcquireSameSizeAs(addr.GetBaseRegister());
    Mov(temp, addr.GetOffset());
    LoadStore(rt, MemOperand(addr.GetBaseRegister(), temp), op);
  } else if (addr.IsImmediatePostIndex() && !IsImmLSUnscaled(offset)) {
    // Post-index beyond unscaled addressing range.
    LoadStore(rt, MemOperand(addr.GetBaseRegister()), op);
    Add(addr.GetBaseRegister(), addr.GetBaseRegister(), Operand(offset));
  } else if (addr.IsImmediatePreIndex() && !IsImmLSUnscaled(offset)) {
    // Pre-index beyond unscaled addressing range.
    Add(addr.GetBaseRegister(), addr.GetBaseRegister(), Operand(offset));
    LoadStore(rt, MemOperand(addr.GetBaseRegister()), op);
  } else {
    // Encodable in one load/store instruction.
    LoadStore(rt, addr, op);
  }
}


#define DEFINE_FUNCTION(FN, REGTYPE, REG, REG2, OP) \
  void MacroAssembler::FN(const REGTYPE REG,        \
                          const REGTYPE REG2,       \
                          const MemOperand& addr) { \
    VIXL_ASSERT(allow_macro_instructions_);         \
    LoadStorePairMacro(REG, REG2, addr, OP);        \
  }
LSPAIR_MACRO_LIST(DEFINE_FUNCTION)
#undef DEFINE_FUNCTION

void MacroAssembler::LoadStorePairMacro(const CPURegister& rt,
                                        const CPURegister& rt2,
                                        const MemOperand& addr,
                                        LoadStorePairOp op) {
  // TODO(all): Should we support register offset for load-store-pair?
  VIXL_ASSERT(!addr.IsRegisterOffset());
  // Worst case is ldp/stp immediate:
  //  * 1 instruction for ldp/stp
  //  * up to 4 instructions to materialise the constant
  //  * 1 instruction to update the base
  MacroEmissionCheckScope guard(this);

  int64_t offset = addr.GetOffset();
  unsigned access_size = CalcLSPairDataSize(op);

  // Check if the offset fits in the immediate field of the appropriate
  // instruction. If not, emit two instructions to perform the operation.
  if (IsImmLSPair(offset, access_size)) {
    // Encodable in one load/store pair instruction.
    LoadStorePair(rt, rt2, addr, op);
  } else {
    Register base = addr.GetBaseRegister();
    if (addr.IsImmediateOffset()) {
      UseScratchRegisterScope temps(this);
      Register temp = temps.AcquireSameSizeAs(base);
      Add(temp, base, offset);
      LoadStorePair(rt, rt2, MemOperand(temp), op);
    } else if (addr.IsImmediatePostIndex()) {
      LoadStorePair(rt, rt2, MemOperand(base), op);
      Add(base, base, offset);
    } else {
      VIXL_ASSERT(addr.IsImmediatePreIndex());
      Add(base, base, offset);
      LoadStorePair(rt, rt2, MemOperand(base), op);
    }
  }
}


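// Illustrative sketch (not from the original source): for X registers the
// ldp/stp immediate is a signed, scaled 7-bit field covering -512 to +504
// bytes in steps of 8, so a call such as
//   __ Ldp(x0, x1, MemOperand(x2, 1024));
// falls back to the scratch-register path above, roughly
//   add ip0, x2, #1024
//   ldp x0, x1, [ip0]
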
void MacroAssembler::Prfm(PrefetchOperation op, const MemOperand& addr) {
  MacroEmissionCheckScope guard(this);

  // There are no pre- or post-index modes for prfm.
  VIXL_ASSERT(addr.IsImmediateOffset() || addr.IsRegisterOffset());

  // The access size is implicitly 8 bytes for all prefetch operations.
  unsigned size = kXRegSizeInBytesLog2;

  // Check if an immediate offset fits in the immediate field of the
  // appropriate instruction. If not, emit two instructions to perform
  // the operation.
  if (addr.IsImmediateOffset() && !IsImmLSScaled(addr.GetOffset(), size) &&
      !IsImmLSUnscaled(addr.GetOffset())) {
    // Immediate offset that can't be encoded using unsigned or unscaled
    // addressing modes.
    UseScratchRegisterScope temps(this);
    Register temp = temps.AcquireSameSizeAs(addr.GetBaseRegister());
    Mov(temp, addr.GetOffset());
    Prefetch(op, MemOperand(addr.GetBaseRegister(), temp));
  } else {
    // Simple register-offsets are encodable in one instruction.
    Prefetch(op, addr);
  }
}


void MacroAssembler::Push(const CPURegister& src0,
                          const CPURegister& src1,
                          const CPURegister& src2,
                          const CPURegister& src3) {
  VIXL_ASSERT(allow_macro_instructions_);
  VIXL_ASSERT(AreSameSizeAndType(src0, src1, src2, src3));
  VIXL_ASSERT(src0.IsValid());

  int count = 1 + src1.IsValid() + src2.IsValid() + src3.IsValid();
  int size = src0.GetSizeInBytes();

  PrepareForPush(count, size);
  PushHelper(count, size, src0, src1, src2, src3);
}


void MacroAssembler::Pop(const CPURegister& dst0,
                         const CPURegister& dst1,
                         const CPURegister& dst2,
                         const CPURegister& dst3) {
  // It is not valid to pop into the same register more than once in one
  // instruction, not even into the zero register.
  VIXL_ASSERT(allow_macro_instructions_);
  VIXL_ASSERT(!AreAliased(dst0, dst1, dst2, dst3));
  VIXL_ASSERT(AreSameSizeAndType(dst0, dst1, dst2, dst3));
  VIXL_ASSERT(dst0.IsValid());

  int count = 1 + dst1.IsValid() + dst2.IsValid() + dst3.IsValid();
  int size = dst0.GetSizeInBytes();

  PrepareForPop(count, size);
  PopHelper(count, size, dst0, dst1, dst2, dst3);
}


void MacroAssembler::PushCPURegList(CPURegList registers) {
  VIXL_ASSERT(!registers.Overlaps(*GetScratchRegisterList()));
  VIXL_ASSERT(!registers.Overlaps(*GetScratchVRegisterList()));
  VIXL_ASSERT(allow_macro_instructions_);

  int reg_size = registers.GetRegisterSizeInBytes();
  PrepareForPush(registers.GetCount(), reg_size);

  // Bump the stack pointer and store two registers at the bottom.
  int size = registers.GetTotalSizeInBytes();
  const CPURegister& bottom_0 = registers.PopLowestIndex();
  const CPURegister& bottom_1 = registers.PopLowestIndex();
  if (bottom_0.IsValid() && bottom_1.IsValid()) {
    Stp(bottom_0, bottom_1, MemOperand(StackPointer(), -size, PreIndex));
  } else if (bottom_0.IsValid()) {
    Str(bottom_0, MemOperand(StackPointer(), -size, PreIndex));
  }

  int offset = 2 * reg_size;
  while (!registers.IsEmpty()) {
    const CPURegister& src0 = registers.PopLowestIndex();
    const CPURegister& src1 = registers.PopLowestIndex();
    if (src1.IsValid()) {
      Stp(src0, src1, MemOperand(StackPointer(), offset));
    } else {
      Str(src0, MemOperand(StackPointer(), offset));
    }
    offset += 2 * reg_size;
  }
}


void MacroAssembler::PopCPURegList(CPURegList registers) {
  VIXL_ASSERT(!registers.Overlaps(*GetScratchRegisterList()));
  VIXL_ASSERT(!registers.Overlaps(*GetScratchVRegisterList()));
  VIXL_ASSERT(allow_macro_instructions_);

  int reg_size = registers.GetRegisterSizeInBytes();
  PrepareForPop(registers.GetCount(), reg_size);

  int size = registers.GetTotalSizeInBytes();
  const CPURegister& bottom_0 = registers.PopLowestIndex();
  const CPURegister& bottom_1 = registers.PopLowestIndex();

  int offset = 2 * reg_size;
  while (!registers.IsEmpty()) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    if (dst1.IsValid()) {
      Ldp(dst0, dst1, MemOperand(StackPointer(), offset));
    } else {
      Ldr(dst0, MemOperand(StackPointer(), offset));
    }
    offset += 2 * reg_size;
  }

  // Load the two registers at the bottom and drop the stack pointer.
  if (bottom_0.IsValid() && bottom_1.IsValid()) {
    Ldp(bottom_0, bottom_1, MemOperand(StackPointer(), size, PostIndex));
  } else if (bottom_0.IsValid()) {
    Ldr(bottom_0, MemOperand(StackPointer(), size, PostIndex));
  }
}


void MacroAssembler::PushMultipleTimes(int count, Register src) {
  VIXL_ASSERT(allow_macro_instructions_);
  int size = src.GetSizeInBytes();

  PrepareForPush(count, size);
  // Push up to four registers at a time if possible, because if the current
  // stack pointer is sp and the register size is 32 bits, registers must be
  // pushed in blocks of four in order to maintain the 16-byte alignment of
  // sp.
  while (count >= 4) {
    PushHelper(4, size, src, src, src, src);
    count -= 4;
  }
  if (count >= 2) {
    PushHelper(2, size, src, src, NoReg, NoReg);
    count -= 2;
  }
  if (count == 1) {
    PushHelper(1, size, src, NoReg, NoReg, NoReg);
    count -= 1;
  }
  VIXL_ASSERT(count == 0);
}


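// Illustrative sketch (not from the original source): assuming the current
// stack pointer is not sp (so the 16-byte alignment assertion does not
// apply), PushMultipleTimes(7, x0) decomposes as
//   PushHelper(4, ...);  // one block of four
//   PushHelper(2, ...);  // one pair
//   PushHelper(1, ...);  // one single register
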
void MacroAssembler::PushHelper(int count,
                                int size,
                                const CPURegister& src0,
                                const CPURegister& src1,
                                const CPURegister& src2,
                                const CPURegister& src3) {
  // Ensure that we don't unintentionally modify scratch or debug registers.
  // Worst case for size is 2 stp.
  ExactAssemblyScope scope(this,
                           2 * kInstructionSize,
                           ExactAssemblyScope::kMaximumSize);

  VIXL_ASSERT(AreSameSizeAndType(src0, src1, src2, src3));
  VIXL_ASSERT(size == src0.GetSizeInBytes());

  // When pushing multiple registers, the store order is chosen such that
  // Push(a, b) is equivalent to Push(a) followed by Push(b).
  switch (count) {
    case 1:
      VIXL_ASSERT(src1.IsNone() && src2.IsNone() && src3.IsNone());
      str(src0, MemOperand(StackPointer(), -1 * size, PreIndex));
      break;
    case 2:
      VIXL_ASSERT(src2.IsNone() && src3.IsNone());
      stp(src1, src0, MemOperand(StackPointer(), -2 * size, PreIndex));
      break;
    case 3:
      VIXL_ASSERT(src3.IsNone());
      stp(src2, src1, MemOperand(StackPointer(), -3 * size, PreIndex));
      str(src0, MemOperand(StackPointer(), 2 * size));
      break;
    case 4:
      // Skip over 4 * size, then fill in the gap. This allows four W
      // registers to be pushed using sp, whilst maintaining 16-byte alignment
      // for sp at all times.
      stp(src3, src2, MemOperand(StackPointer(), -4 * size, PreIndex));
      stp(src1, src0, MemOperand(StackPointer(), 2 * size));
      break;
    default:
      VIXL_UNREACHABLE();
  }
}


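// Illustrative sketch (not from the original source): Push(x0, x1) reaches
// the count == 2 case above and emits
//   stp x1, x0, [sp, #-16]!
// leaving x1 at the lower address, which is exactly the layout produced by
// Push(x0) followed by Push(x1).
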
void MacroAssembler::PopHelper(int count,
                               int size,
                               const CPURegister& dst0,
                               const CPURegister& dst1,
                               const CPURegister& dst2,
                               const CPURegister& dst3) {
  // Ensure that we don't unintentionally modify scratch or debug registers.
  // Worst case for size is 2 ldp.
  ExactAssemblyScope scope(this,
                           2 * kInstructionSize,
                           ExactAssemblyScope::kMaximumSize);

  VIXL_ASSERT(AreSameSizeAndType(dst0, dst1, dst2, dst3));
  VIXL_ASSERT(size == dst0.GetSizeInBytes());

  // When popping multiple registers, the load order is chosen such that
  // Pop(a, b) is equivalent to Pop(a) followed by Pop(b).
  switch (count) {
    case 1:
      VIXL_ASSERT(dst1.IsNone() && dst2.IsNone() && dst3.IsNone());
      ldr(dst0, MemOperand(StackPointer(), 1 * size, PostIndex));
      break;
    case 2:
      VIXL_ASSERT(dst2.IsNone() && dst3.IsNone());
      ldp(dst0, dst1, MemOperand(StackPointer(), 2 * size, PostIndex));
      break;
    case 3:
      VIXL_ASSERT(dst3.IsNone());
      ldr(dst2, MemOperand(StackPointer(), 2 * size));
      ldp(dst0, dst1, MemOperand(StackPointer(), 3 * size, PostIndex));
      break;
    case 4:
      // Load the higher addresses first, then load the lower addresses and
      // skip the whole block in the second instruction. This allows four W
      // registers to be popped using sp, whilst maintaining 16-byte alignment
      // for sp at all times.
      ldp(dst2, dst3, MemOperand(StackPointer(), 2 * size));
      ldp(dst0, dst1, MemOperand(StackPointer(), 4 * size, PostIndex));
      break;
    default:
      VIXL_UNREACHABLE();
  }
}


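// Illustrative sketch (not from the original source): Pop(x0, x1) reaches
// the count == 2 case above and emits
//   ldp x0, x1, [sp], #16
// so x0 is loaded from the lower address, matching Pop(x0) followed by
// Pop(x1).
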
void MacroAssembler::PrepareForPush(int count, int size) {
  if (sp.Is(StackPointer())) {
    // If the current stack pointer is sp, then it must be aligned to 16 bytes
    // on entry and the total size of the specified registers must also be a
    // multiple of 16 bytes.
    VIXL_ASSERT((count * size) % 16 == 0);
  } else {
    // Even if the current stack pointer is not the system stack pointer (sp),
    // the system stack pointer will still be modified in order to comply with
    // ABI rules about accessing memory below the system stack pointer.
    BumpSystemStackPointer(count * size);
  }
}


void MacroAssembler::PrepareForPop(int count, int size) {
  USE(count, size);
  if (sp.Is(StackPointer())) {
    // If the current stack pointer is sp, then it must be aligned to 16 bytes
    // on entry and the total size of the specified registers must also be a
    // multiple of 16 bytes.
    VIXL_ASSERT((count * size) % 16 == 0);
  }
}

void MacroAssembler::Poke(const Register& src, const Operand& offset) {
  VIXL_ASSERT(allow_macro_instructions_);
  if (offset.IsImmediate()) {
    VIXL_ASSERT(offset.GetImmediate() >= 0);
  }

  Str(src, MemOperand(StackPointer(), offset));
}


void MacroAssembler::Peek(const Register& dst, const Operand& offset) {
  VIXL_ASSERT(allow_macro_instructions_);
  if (offset.IsImmediate()) {
    VIXL_ASSERT(offset.GetImmediate() >= 0);
  }

  Ldr(dst, MemOperand(StackPointer(), offset));
}


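// Illustrative sketch (not from the original source): Poke and Peek access
// the stack without moving the stack pointer. With sp as the current stack
// pointer:
//   __ Poke(x0, 8);  // str x0, [sp, #8]
//   __ Peek(x1, 8);  // ldr x1, [sp, #8]
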
void MacroAssembler::Claim(const Operand& size) {
  VIXL_ASSERT(allow_macro_instructions_);

  if (size.IsZero()) {
    return;
  }

  if (size.IsImmediate()) {
    VIXL_ASSERT(size.GetImmediate() > 0);
    if (sp.Is(StackPointer())) {
      VIXL_ASSERT((size.GetImmediate() % 16) == 0);
    }
  }

  if (!sp.Is(StackPointer())) {
    BumpSystemStackPointer(size);
  }

  Sub(StackPointer(), StackPointer(), size);
}


void MacroAssembler::Drop(const Operand& size) {
  VIXL_ASSERT(allow_macro_instructions_);

  if (size.IsZero()) {
    return;
  }

  if (size.IsImmediate()) {
    VIXL_ASSERT(size.GetImmediate() > 0);
    if (sp.Is(StackPointer())) {
      VIXL_ASSERT((size.GetImmediate() % 16) == 0);
    }
  }

  Add(StackPointer(), StackPointer(), size);
}


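// Illustrative sketch (not from the original source): with sp as the current
// stack pointer, a 32-byte stack frame is claimed and released with
//   __ Claim(32);  // sub sp, sp, #32
//   ...
//   __ Drop(32);   // add sp, sp, #32
// Both sizes must be multiples of 16 when sp is the stack pointer.
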
void MacroAssembler::PushCalleeSavedRegisters() {
  // Ensure that the macro-assembler doesn't use any scratch registers.
  // 10 stp will be emitted.
  // TODO(all): Should we use GetCalleeSaved and SavedFP?
  ExactAssemblyScope scope(this, 10 * kInstructionSize);

  // This method must not be called unless the current stack pointer is sp.
  VIXL_ASSERT(sp.Is(StackPointer()));

  MemOperand tos(sp, -2 * static_cast<int>(kXRegSizeInBytes), PreIndex);

  stp(x29, x30, tos);
  stp(x27, x28, tos);
  stp(x25, x26, tos);
  stp(x23, x24, tos);
  stp(x21, x22, tos);
  stp(x19, x20, tos);

  stp(d14, d15, tos);
  stp(d12, d13, tos);
  stp(d10, d11, tos);
  stp(d8, d9, tos);
}


void MacroAssembler::PopCalleeSavedRegisters() {
  // Ensure that the macro-assembler doesn't use any scratch registers.
  // 10 ldp will be emitted.
  // TODO(all): Should we use GetCalleeSaved and SavedFP?
  ExactAssemblyScope scope(this, 10 * kInstructionSize);

  // This method must not be called unless the current stack pointer is sp.
  VIXL_ASSERT(sp.Is(StackPointer()));

  MemOperand tos(sp, 2 * kXRegSizeInBytes, PostIndex);

  ldp(d8, d9, tos);
  ldp(d10, d11, tos);
  ldp(d12, d13, tos);
  ldp(d14, d15, tos);

  ldp(x19, x20, tos);
  ldp(x21, x22, tos);
  ldp(x23, x24, tos);
  ldp(x25, x26, tos);
  ldp(x27, x28, tos);
  ldp(x29, x30, tos);
}

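// Illustrative sketch (not from the original source): the two helpers above
// bracket a generated function body as a prologue and epilogue:
//   __ PushCalleeSavedRegisters();  // 10 stp: x19-x30, d8-d15
//   ... function body ...
//   __ PopCalleeSavedRegisters();   // 10 ldp, in reverse order
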
void MacroAssembler::LoadCPURegList(CPURegList registers,
                                    const MemOperand& src) {
  LoadStoreCPURegListHelper(kLoad, registers, src);
}

void MacroAssembler::StoreCPURegList(CPURegList registers,
                                     const MemOperand& dst) {
  LoadStoreCPURegListHelper(kStore, registers, dst);
}


void MacroAssembler::LoadStoreCPURegListHelper(LoadStoreCPURegListAction op,
                                               CPURegList registers,
                                               const MemOperand& mem) {
  // We do not handle pre-indexing or post-indexing.
  VIXL_ASSERT(!(mem.IsPreIndex() || mem.IsPostIndex()));
  VIXL_ASSERT(!registers.Overlaps(tmp_list_));
  VIXL_ASSERT(!registers.Overlaps(v_tmp_list_));
  VIXL_ASSERT(!registers.Overlaps(p_tmp_list_));
  VIXL_ASSERT(!registers.IncludesAliasOf(sp));

  UseScratchRegisterScope temps(this);

  MemOperand loc = BaseMemOperandForLoadStoreCPURegList(registers, mem, &temps);
  const int reg_size = registers.GetRegisterSizeInBytes();

  VIXL_ASSERT(IsPowerOf2(reg_size));

  // Since we are operating on register pairs, we would like to align on
  // double the standard size; on the other hand, we don't want to insert an
  // extra operation, which would be the result if the number of registers
  // were even. Note that the alignment of the base pointer is unknown here,
  // but we assume that it is more likely to be aligned.
  if (((loc.GetOffset() & (2 * reg_size - 1)) != 0) &&
      ((registers.GetCount() % 2) != 0)) {
    if (op == kStore) {
      Str(registers.PopLowestIndex(), loc);
    } else {
      VIXL_ASSERT(op == kLoad);
      Ldr(registers.PopLowestIndex(), loc);
    }
    loc.AddOffset(reg_size);
  }
  while (registers.GetCount() >= 2) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    if (op == kStore) {
      Stp(dst0, dst1, loc);
    } else {
      VIXL_ASSERT(op == kLoad);
      Ldp(dst0, dst1, loc);
    }
    loc.AddOffset(2 * reg_size);
  }
  if (!registers.IsEmpty()) {
    if (op == kStore) {
      Str(registers.PopLowestIndex(), loc);
    } else {
      VIXL_ASSERT(op == kLoad);
      Ldr(registers.PopLowestIndex(), loc);
    }
  }
}

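// Illustrative sketch (not from the original source): storing {x0, x1, x2}
// when the base offset is 8-byte but not 16-byte aligned first peels off one
// register so the remaining pair access is 16-byte aligned:
//   str x0, [base, #8]
//   stp x1, x2, [base, #16]
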
MemOperand MacroAssembler::BaseMemOperandForLoadStoreCPURegList(
    const CPURegList& registers,
    const MemOperand& mem,
    UseScratchRegisterScope* scratch_scope) {
  // If necessary, pre-compute the base address for the accesses.
  if (mem.IsRegisterOffset()) {
    Register reg_base = scratch_scope->AcquireX();
    ComputeAddress(reg_base, mem);
    return MemOperand(reg_base);

  } else if (mem.IsImmediateOffset()) {
    int reg_size = registers.GetRegisterSizeInBytes();
    int total_size = registers.GetTotalSizeInBytes();
    int64_t min_offset = mem.GetOffset();
    int64_t max_offset =
        mem.GetOffset() + std::max(0, total_size - 2 * reg_size);
    if ((registers.GetCount() >= 2) &&
        (!Assembler::IsImmLSPair(min_offset, WhichPowerOf2(reg_size)) ||
         !Assembler::IsImmLSPair(max_offset, WhichPowerOf2(reg_size)))) {
      Register reg_base = scratch_scope->AcquireX();
      ComputeAddress(reg_base, mem);
      return MemOperand(reg_base);
    }
  }

  return mem;
}

void MacroAssembler::BumpSystemStackPointer(const Operand& space) {
  VIXL_ASSERT(!sp.Is(StackPointer()));
  // TODO: Several callers rely on this not using scratch registers, so we use
  // the assembler directly here. However, this means that large immediate
  // values of 'space' cannot be handled.
  ExactAssemblyScope scope(this, kInstructionSize);
  sub(sp, StackPointer(), space);
}


// TODO(all): Fix printf for NEON and SVE registers.

// This is the main Printf implementation. All callee-saved registers are
// preserved, but NZCV and the caller-saved registers may be clobbered.
void MacroAssembler::PrintfNoPreserve(const char* format,
                                      const CPURegister& arg0,
                                      const CPURegister& arg1,
                                      const CPURegister& arg2,
                                      const CPURegister& arg3) {
  // We cannot handle a caller-saved stack pointer. It doesn't make much sense
  // in most cases anyway, so this restriction shouldn't be too serious.
  VIXL_ASSERT(!kCallerSaved.IncludesAliasOf(StackPointer()));

  // The provided arguments, and their proper PCS registers.
  CPURegister args[kPrintfMaxArgCount] = {arg0, arg1, arg2, arg3};
  CPURegister pcs[kPrintfMaxArgCount];

  int arg_count = kPrintfMaxArgCount;

  // The PCS varargs registers for printf. Note that x0 is used for the printf
  // format string.
  static const CPURegList kPCSVarargs =
      CPURegList(CPURegister::kRegister, kXRegSize, 1, arg_count);
  static const CPURegList kPCSVarargsV =
      CPURegList(CPURegister::kVRegister, kDRegSize, 0, arg_count - 1);

  // We can use caller-saved registers as scratch values, except for the
  // arguments and the PCS registers where they might need to go.
  UseScratchRegisterScope temps(this);
  temps.Include(kCallerSaved);
  temps.Include(kCallerSavedV);
  temps.Exclude(kPCSVarargs);
  temps.Exclude(kPCSVarargsV);
  temps.Exclude(arg0, arg1, arg2, arg3);

  // Copies of the arg lists that we can iterate through.
  CPURegList pcs_varargs = kPCSVarargs;
  CPURegList pcs_varargs_fp = kPCSVarargsV;

  // Place the arguments. There are lots of clever tricks and optimizations we
  // could use here, but Printf is a debug tool so instead we just try to keep
  // it simple: move each input that isn't already in the right place to a
  // scratch register, then move everything back.
  for (unsigned i = 0; i < kPrintfMaxArgCount; i++) {
    // Work out the proper PCS register for this argument.
    if (args[i].IsRegister()) {
      pcs[i] = pcs_varargs.PopLowestIndex().X();
      // We might only need a W register here. We need to know the size of the
      // argument so we can properly encode it for the simulator call.
      if (args[i].Is32Bits()) pcs[i] = pcs[i].W();
    } else if (args[i].IsVRegister()) {
      // In C, floats are always cast to doubles for varargs calls.
      pcs[i] = pcs_varargs_fp.PopLowestIndex().D();
    } else {
      VIXL_ASSERT(args[i].IsNone());
      arg_count = i;
      break;
    }

    // If the argument is already in the right place, leave it where it is.
    if (args[i].Aliases(pcs[i])) continue;

    // Otherwise, if the argument is in a PCS argument register, allocate an
    // appropriate scratch register and then move it out of the way.
    if (kPCSVarargs.IncludesAliasOf(args[i]) ||
        kPCSVarargsV.IncludesAliasOf(args[i])) {
      if (args[i].IsRegister()) {
        Register old_arg = Register(args[i]);
        Register new_arg = temps.AcquireSameSizeAs(old_arg);
        Mov(new_arg, old_arg);
        args[i] = new_arg;
      } else {
        VRegister old_arg(args[i]);
        VRegister new_arg = temps.AcquireSameSizeAs(old_arg);
        Fmov(new_arg, old_arg);
        args[i] = new_arg;
      }
    }
  }

  // Do a second pass to move values into their final positions and perform
  // any conversions that may be required.
  for (int i = 0; i < arg_count; i++) {
    VIXL_ASSERT(pcs[i].GetType() == args[i].GetType());
    if (pcs[i].IsRegister()) {
      Mov(Register(pcs[i]), Register(args[i]), kDiscardForSameWReg);
    } else {
      VIXL_ASSERT(pcs[i].IsVRegister());
      if (pcs[i].GetSizeInBits() == args[i].GetSizeInBits()) {
        Fmov(VRegister(pcs[i]), VRegister(args[i]));
      } else {
        Fcvt(VRegister(pcs[i]), VRegister(args[i]));
      }
    }
  }

  // Load the format string into x0, as per the procedure-call standard.
  //
  // To make the code as portable as possible, the format string is encoded
  // directly in the instruction stream. It might be cleaner to encode it in a
  // literal pool, but since Printf is usually used for debugging, it is
  // beneficial for it to be minimally dependent on other features.
  temps.Exclude(x0);
  Label format_address;
  Adr(x0, &format_address);

  // Emit the format string directly in the instruction stream.
  {
    BlockPoolsScope scope(this);
    // Data emitted:
    //   branch
    //   strlen(format) + 1 (includes null termination)
    //   padding to next instruction
    //   unreachable
    EmissionCheckScope guard(this,
                             AlignUp(strlen(format) + 1, kInstructionSize) +
                                 2 * kInstructionSize);
    Label after_data;
    B(&after_data);
    Bind(&format_address);
    EmitString(format);
    Unreachable();
    Bind(&after_data);
  }

  // We don't pass any arguments on the stack, but we still need to align the
  // C stack pointer to a 16-byte boundary for PCS compliance.
  if (!sp.Is(StackPointer())) {
    Bic(sp, StackPointer(), 0xf);
  }

  // Actually call printf. This part needs special handling for the simulator,
  // since the system printf function will use a different instruction set and
  // the procedure-call standard will not be compatible.
  if (generate_simulator_code_) {
    ExactAssemblyScope scope(this, kPrintfLength);
    hlt(kPrintfOpcode);
    dc32(arg_count);  // kPrintfArgCountOffset

    // Determine the argument pattern.
    uint32_t arg_pattern_list = 0;
    for (int i = 0; i < arg_count; i++) {
      uint32_t arg_pattern;
      if (pcs[i].IsRegister()) {
        arg_pattern = pcs[i].Is32Bits() ? kPrintfArgW : kPrintfArgX;
      } else {
        VIXL_ASSERT(pcs[i].Is64Bits());
        arg_pattern = kPrintfArgD;
      }
      VIXL_ASSERT(arg_pattern < (1 << kPrintfArgPatternBits));
      arg_pattern_list |= (arg_pattern << (kPrintfArgPatternBits * i));
    }
    dc32(arg_pattern_list);  // kPrintfArgPatternListOffset
  } else {
    Register tmp = temps.AcquireX();
    Mov(tmp, reinterpret_cast<uintptr_t>(printf));
    Blr(tmp);
  }
}


void MacroAssembler::Printf(const char* format,
                            CPURegister arg0,
                            CPURegister arg1,
                            CPURegister arg2,
                            CPURegister arg3) {
  // We can only print sp if it is the current stack pointer.
  if (!sp.Is(StackPointer())) {
    VIXL_ASSERT(!sp.Aliases(arg0));
    VIXL_ASSERT(!sp.Aliases(arg1));
    VIXL_ASSERT(!sp.Aliases(arg2));
    VIXL_ASSERT(!sp.Aliases(arg3));
  }

  // Make sure that the macro assembler doesn't try to use any of our
  // arguments as scratch registers.
  UseScratchRegisterScope exclude_all(this);
  exclude_all.ExcludeAll();

  // Preserve all caller-saved registers as well as NZCV.
  // If sp is the stack pointer, PushCPURegList asserts that the size of each
  // list is a multiple of 16 bytes.
  PushCPURegList(kCallerSaved);
  PushCPURegList(kCallerSavedV);

  {
    UseScratchRegisterScope temps(this);
    // We can use caller-saved registers as scratch values (except for argN).
    temps.Include(kCallerSaved);
    temps.Include(kCallerSavedV);
    temps.Exclude(arg0, arg1, arg2, arg3);

    // If any of the arguments are the current stack pointer, allocate a new
    // register for them, and adjust the value to compensate for pushing the
    // caller-saved registers.
    bool arg0_sp = StackPointer().Aliases(arg0);
    bool arg1_sp = StackPointer().Aliases(arg1);
    bool arg2_sp = StackPointer().Aliases(arg2);
    bool arg3_sp = StackPointer().Aliases(arg3);
    if (arg0_sp || arg1_sp || arg2_sp || arg3_sp) {
      // Allocate a register to hold the original stack pointer value, to pass
      // to PrintfNoPreserve as an argument.
      Register arg_sp = temps.AcquireX();
      Add(arg_sp,
          StackPointer(),
          kCallerSaved.GetTotalSizeInBytes() +
              kCallerSavedV.GetTotalSizeInBytes());
      if (arg0_sp) arg0 = Register(arg_sp.GetCode(), arg0.GetSizeInBits());
      if (arg1_sp) arg1 = Register(arg_sp.GetCode(), arg1.GetSizeInBits());
      if (arg2_sp) arg2 = Register(arg_sp.GetCode(), arg2.GetSizeInBits());
      if (arg3_sp) arg3 = Register(arg_sp.GetCode(), arg3.GetSizeInBits());
    }

    // Preserve NZCV.
    Register tmp = temps.AcquireX();
    Mrs(tmp, NZCV);
    Push(tmp, xzr);
    temps.Release(tmp);

    PrintfNoPreserve(format, arg0, arg1, arg2, arg3);

    // Restore NZCV.
    tmp = temps.AcquireX();
    Pop(xzr, tmp);
    Msr(NZCV, tmp);
    temps.Release(tmp);
  }

  PopCPURegList(kCallerSavedV);
  PopCPURegList(kCallerSaved);
}

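// Illustrative sketch (hypothetical usage, not from the original source),
// with a standard printf-style format string:
//   __ Mov(x10, 42);
//   __ Printf("The answer is %" PRId64 ".\n", x10);
// All registers and NZCV are preserved across the call, at the cost of
// pushing and popping kCallerSaved and kCallerSavedV around it.
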
void MacroAssembler::Trace(TraceParameters parameters, TraceCommand command) {
  VIXL_ASSERT(allow_macro_instructions_);

  if (generate_simulator_code_) {
    // The arguments to the trace pseudo instruction need to be contiguous in
    // memory, so make sure we don't try to emit a literal pool.
    ExactAssemblyScope scope(this, kTraceLength);

    Label start;
    bind(&start);

    // Refer to simulator-aarch64.h for a description of the marker and its
    // arguments.
    hlt(kTraceOpcode);

    VIXL_ASSERT(GetSizeOfCodeGeneratedSince(&start) == kTraceParamsOffset);
    dc32(parameters);

    VIXL_ASSERT(GetSizeOfCodeGeneratedSince(&start) == kTraceCommandOffset);
    dc32(command);
  } else {
    // Emit nothing on real hardware.
    USE(parameters, command);
  }
}


void MacroAssembler::Log(TraceParameters parameters) {
  VIXL_ASSERT(allow_macro_instructions_);

  if (generate_simulator_code_) {
    // The arguments to the log pseudo instruction need to be contiguous in
    // memory, so make sure we don't try to emit a literal pool.
    ExactAssemblyScope scope(this, kLogLength);

    Label start;
    bind(&start);

    // Refer to simulator-aarch64.h for a description of the marker and its
    // arguments.
    hlt(kLogOpcode);

    VIXL_ASSERT(GetSizeOfCodeGeneratedSince(&start) == kLogParamsOffset);
    dc32(parameters);
  } else {
    // Emit nothing on real hardware.
    USE(parameters);
  }
}


void MacroAssembler::SetSimulatorCPUFeatures(const CPUFeatures& features) {
  ConfigureSimulatorCPUFeaturesHelper(features, kSetCPUFeaturesOpcode);
}


void MacroAssembler::EnableSimulatorCPUFeatures(const CPUFeatures& features) {
  ConfigureSimulatorCPUFeaturesHelper(features, kEnableCPUFeaturesOpcode);
}


void MacroAssembler::DisableSimulatorCPUFeatures(const CPUFeatures& features) {
  ConfigureSimulatorCPUFeaturesHelper(features, kDisableCPUFeaturesOpcode);
}


void MacroAssembler::ConfigureSimulatorCPUFeaturesHelper(
    const CPUFeatures& features, DebugHltOpcode action) {
  VIXL_ASSERT(allow_macro_instructions_);
  VIXL_ASSERT(generate_simulator_code_);

  typedef ConfigureCPUFeaturesElementType ElementType;
  VIXL_ASSERT(CPUFeatures::kNumberOfFeatures <=
              std::numeric_limits<ElementType>::max());

  size_t count = features.Count();

  size_t preamble_length = kConfigureCPUFeaturesListOffset;
  size_t list_length = (count + 1) * sizeof(ElementType);
  size_t padding_length = AlignUp(list_length, kInstructionSize) - list_length;

  size_t total_length = preamble_length + list_length + padding_length;

  // Check the overall code size as well as the size of each component.
  ExactAssemblyScope guard_total(this, total_length);

  {  // Preamble: the opcode itself.
    ExactAssemblyScope guard_preamble(this, preamble_length);
    hlt(action);
  }
  {  // A kNone-terminated list of features.
    ExactAssemblyScope guard_list(this, list_length);
    for (CPUFeatures::const_iterator it = features.begin();
         it != features.end();
         ++it) {
      dc(static_cast<ElementType>(*it));
    }
    dc(static_cast<ElementType>(CPUFeatures::kNone));
  }
  {  // Padding for instruction alignment.
    ExactAssemblyScope guard_padding(this, padding_length);
    for (size_t size = 0; size < padding_length; size += sizeof(ElementType)) {
      // The exact value is arbitrary.
      dc(static_cast<ElementType>(CPUFeatures::kNone));
    }
  }
}

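// Illustrative sketch (not from the original source): for a CPUFeatures set
// holding two features, the helper above lays out
//   hlt #<action>
//   <feature 0> <feature 1> <kNone terminator> <padding to a multiple of
//   kInstructionSize>
// so the simulator can read a contiguous, kNone-terminated list.
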
void MacroAssembler::SaveSimulatorCPUFeatures() {
  VIXL_ASSERT(allow_macro_instructions_);
  VIXL_ASSERT(generate_simulator_code_);
  SingleEmissionCheckScope guard(this);
  hlt(kSaveCPUFeaturesOpcode);
}


void MacroAssembler::RestoreSimulatorCPUFeatures() {
  VIXL_ASSERT(allow_macro_instructions_);
  VIXL_ASSERT(generate_simulator_code_);
  SingleEmissionCheckScope guard(this);
  hlt(kRestoreCPUFeaturesOpcode);
}


void UseScratchRegisterScope::Open(MacroAssembler* masm) {
  VIXL_ASSERT(masm_ == NULL);
  VIXL_ASSERT(masm != NULL);
  masm_ = masm;

  CPURegList* available = masm->GetScratchRegisterList();
  CPURegList* available_v = masm->GetScratchVRegisterList();
  CPURegList* available_p = masm->GetScratchPRegisterList();
  old_available_ = available->GetList();
  old_available_v_ = available_v->GetList();
  old_available_p_ = available_p->GetList();
  VIXL_ASSERT(available->GetType() == CPURegister::kRegister);
  VIXL_ASSERT(available_v->GetType() == CPURegister::kVRegister);
  VIXL_ASSERT(available_p->GetType() == CPURegister::kPRegister);

  parent_ = masm->GetCurrentScratchRegisterScope();
  masm->SetCurrentScratchRegisterScope(this);
}


void UseScratchRegisterScope::Close() {
  if (masm_ != NULL) {
    // Ensure that scopes nest perfectly, and do not outlive their parents.
    // This is a run-time check because the order of destruction of objects in
    // the _same_ scope is implementation-defined, and is likely to change in
    // optimised builds.
    VIXL_CHECK(masm_->GetCurrentScratchRegisterScope() == this);
    masm_->SetCurrentScratchRegisterScope(parent_);

    masm_->GetScratchRegisterList()->SetList(old_available_);
    masm_->GetScratchVRegisterList()->SetList(old_available_v_);
    masm_->GetScratchPRegisterList()->SetList(old_available_p_);

    masm_ = NULL;
  }
}


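// Illustrative sketch (hypothetical usage, not from the original source),
// where masm is a MacroAssembler:
//   {
//     UseScratchRegisterScope temps(&masm);
//     Register scratch = temps.AcquireX();
//     // ... use scratch ...
//   }  // The destructor calls Close() and restores the available lists.
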
bool UseScratchRegisterScope::IsAvailable(const CPURegister& reg) const {
  return masm_->GetScratchRegisterList()->IncludesAliasOf(reg) ||
         masm_->GetScratchVRegisterList()->IncludesAliasOf(reg) ||
         masm_->GetScratchPRegisterList()->IncludesAliasOf(reg);
}

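// For example, a caller can probe availability before committing to a
// register (illustrative):
//
//   UseScratchRegisterScope temps(&masm);
//   if (temps.IsAvailable(x16)) {
//     // x16 has not been acquired or excluded in this scope.
//   }

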
Register UseScratchRegisterScope::AcquireRegisterOfSize(int size_in_bits) {
  int code = AcquireFrom(masm_->GetScratchRegisterList()).GetCode();
  return Register(code, size_in_bits);
}


VRegister UseScratchRegisterScope::AcquireVRegisterOfSize(int size_in_bits) {
  int code = AcquireFrom(masm_->GetScratchVRegisterList()).GetCode();
  return VRegister(code, size_in_bits);
}


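// These helpers return a register of the requested width, e.g. (sketch, using
// VIXL's kWRegSize and kDRegSize constants):
//
//   Register w_tmp = temps.AcquireRegisterOfSize(kWRegSize);    // 32-bit core.
//   VRegister d_tmp = temps.AcquireVRegisterOfSize(kDRegSize);  // 64-bit FP.

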
void UseScratchRegisterScope::Release(const CPURegister& reg) {
  VIXL_ASSERT(masm_ != NULL);

  // Release(NoReg) has no effect.
  if (reg.IsNone()) return;

  ReleaseByCode(GetAvailableListFor(reg.GetBank()), reg.GetCode());
}


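// Releasing hands a register back to the pool before the scope closes, e.g.
// (sketch):
//
//   Register tmp = temps.AcquireX();
//   // ... last use of `tmp` ...
//   temps.Release(tmp);  // `tmp` may now be acquired again in this scope.

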
void UseScratchRegisterScope::Include(const CPURegList& list) {
  VIXL_ASSERT(masm_ != NULL);

  // Including an empty list has no effect.
  if (list.IsEmpty()) return;
  VIXL_ASSERT(list.GetType() != CPURegister::kNoRegister);

  RegList reg_list = list.GetList();
  if (list.GetType() == CPURegister::kRegister) {
    // Make sure that neither sp nor xzr are included in the list.
    reg_list &= ~(xzr.GetBit() | sp.GetBit());
  }

  IncludeByRegList(GetAvailableListFor(list.GetBank()), reg_list);
}


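// `Include` widens the scratch pool. A caller that knows, say, x8 and x9 are
// dead at this point might offer them as scratch registers (sketch):
//
//   temps.Include(x8, x9);
//
// Note that sp and xzr are filtered out, so including them is a no-op rather
// than an error.

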
void UseScratchRegisterScope::Include(const Register& reg1,
                                      const Register& reg2,
                                      const Register& reg3,
                                      const Register& reg4) {
  VIXL_ASSERT(masm_ != NULL);
  RegList include =
      reg1.GetBit() | reg2.GetBit() | reg3.GetBit() | reg4.GetBit();
  // Make sure that neither sp nor xzr are included in the list.
  include &= ~(xzr.GetBit() | sp.GetBit());

  IncludeByRegList(masm_->GetScratchRegisterList(), include);
}


void UseScratchRegisterScope::Include(const VRegister& reg1,
                                      const VRegister& reg2,
                                      const VRegister& reg3,
                                      const VRegister& reg4) {
  RegList include =
      reg1.GetBit() | reg2.GetBit() | reg3.GetBit() | reg4.GetBit();
  IncludeByRegList(masm_->GetScratchVRegisterList(), include);
}


void UseScratchRegisterScope::Include(const CPURegister& reg1,
                                      const CPURegister& reg2,
                                      const CPURegister& reg3,
                                      const CPURegister& reg4) {
  RegList include = 0;
  RegList include_v = 0;
  RegList include_p = 0;

  const CPURegister regs[] = {reg1, reg2, reg3, reg4};

  for (size_t i = 0; i < ArrayLength(regs); i++) {
    RegList bit = regs[i].GetBit();
    switch (regs[i].GetBank()) {
      case CPURegister::kNoRegisterBank:
        // Include(NoReg) has no effect.
        VIXL_ASSERT(regs[i].IsNone());
        break;
      case CPURegister::kRRegisterBank:
        include |= bit;
        break;
      case CPURegister::kVRegisterBank:
        include_v |= bit;
        break;
      case CPURegister::kPRegisterBank:
        include_p |= bit;
        break;
    }
  }

  IncludeByRegList(masm_->GetScratchRegisterList(), include);
  IncludeByRegList(masm_->GetScratchVRegisterList(), include_v);
  IncludeByRegList(masm_->GetScratchPRegisterList(), include_p);
}


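// Because this overload dispatches on register bank, callers can mix core,
// vector and predicate registers in a single call (sketch):
//
//   temps.Include(x10, v31);  // x10 goes to the core list, v31 to the
//                             // vector list.

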
void UseScratchRegisterScope::Exclude(const CPURegList& list) {
  ExcludeByRegList(GetAvailableListFor(list.GetBank()), list.GetList());
}


void UseScratchRegisterScope::Exclude(const Register& reg1,
                                      const Register& reg2,
                                      const Register& reg3,
                                      const Register& reg4) {
  RegList exclude =
      reg1.GetBit() | reg2.GetBit() | reg3.GetBit() | reg4.GetBit();
  ExcludeByRegList(masm_->GetScratchRegisterList(), exclude);
}


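// `Exclude` is the counterpart to `Include`: it protects live registers from
// being handed out as scratch within this scope, e.g. (sketch):
//
//   temps.Exclude(x16, x17);  // x16 and x17 hold live values here.

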
void UseScratchRegisterScope::Exclude(const VRegister& reg1,
                                      const VRegister& reg2,
                                      const VRegister& reg3,
                                      const VRegister& reg4) {
  RegList exclude_v =
      reg1.GetBit() | reg2.GetBit() | reg3.GetBit() | reg4.GetBit();
  ExcludeByRegList(masm_->GetScratchVRegisterList(), exclude_v);
}


void UseScratchRegisterScope::Exclude(const CPURegister& reg1,
                                      const CPURegister& reg2,
                                      const CPURegister& reg3,
                                      const CPURegister& reg4) {
  RegList exclude = 0;
  RegList exclude_v = 0;
  RegList exclude_p = 0;

  const CPURegister regs[] = {reg1, reg2, reg3, reg4};

  for (size_t i = 0; i < ArrayLength(regs); i++) {
    RegList bit = regs[i].GetBit();
    switch (regs[i].GetBank()) {
      case CPURegister::kNoRegisterBank:
        // Exclude(NoReg) has no effect.
        VIXL_ASSERT(regs[i].IsNone());
        break;
      case CPURegister::kRRegisterBank:
        exclude |= bit;
        break;
      case CPURegister::kVRegisterBank:
        exclude_v |= bit;
        break;
      case CPURegister::kPRegisterBank:
        exclude_p |= bit;
        break;
    }
  }

  ExcludeByRegList(masm_->GetScratchRegisterList(), exclude);
  ExcludeByRegList(masm_->GetScratchVRegisterList(), exclude_v);
  ExcludeByRegList(masm_->GetScratchPRegisterList(), exclude_p);
}


void UseScratchRegisterScope::ExcludeAll() {
  ExcludeByRegList(masm_->GetScratchRegisterList(),
                   masm_->GetScratchRegisterList()->GetList());
  ExcludeByRegList(masm_->GetScratchVRegisterList(),
                   masm_->GetScratchVRegisterList()->GetList());
  ExcludeByRegList(masm_->GetScratchPRegisterList(),
                   masm_->GetScratchPRegisterList()->GetList());
}


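// `ExcludeAll` empties every scratch list, which is useful around code that
// must not clobber any register implicitly, e.g. (sketch):
//
//   UseScratchRegisterScope temps(&masm);
//   temps.ExcludeAll();
//   // Any operation that needs a scratch register will now fail the
//   // VIXL_CHECK in AcquireFrom rather than silently clobber state.

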
CPURegister UseScratchRegisterScope::AcquireFrom(CPURegList* available,
                                                 RegList mask) {
  // At least one register in `available` must match `mask`, otherwise there
  // is nothing left to acquire.
  VIXL_CHECK((available->GetList() & mask) != 0);
  CPURegister result = available->PopLowestIndex(mask);
  VIXL_ASSERT(!AreAliased(result, xzr, sp));
  return result;
}


void UseScratchRegisterScope::ReleaseByCode(CPURegList* available, int code) {
  ReleaseByRegList(available, static_cast<RegList>(1) << code);
}


void UseScratchRegisterScope::ReleaseByRegList(CPURegList* available,
                                               RegList regs) {
  // Mark the registers in `regs` as available again.
  available->SetList(available->GetList() | regs);
}


void UseScratchRegisterScope::IncludeByRegList(CPURegList* available,
                                               RegList regs) {
  available->SetList(available->GetList() | regs);
}


void UseScratchRegisterScope::ExcludeByRegList(CPURegList* available,
                                               RegList exclude) {
  available->SetList(available->GetList() & ~exclude);
}

CPURegList* UseScratchRegisterScope::GetAvailableListFor(
    CPURegister::RegisterBank bank) {
  switch (bank) {
    case CPURegister::kNoRegisterBank:
      return NULL;
    case CPURegister::kRRegisterBank:
      return masm_->GetScratchRegisterList();
    case CPURegister::kVRegisterBank:
      return masm_->GetScratchVRegisterList();
    case CPURegister::kPRegisterBank:
      return masm_->GetScratchPRegisterList();
  }
  VIXL_UNREACHABLE();
  return NULL;
}

}  // namespace aarch64
}  // namespace vixl