1*f5c631daSSadaf Ebrahimi // Copyright 2016, VIXL authors
2*f5c631daSSadaf Ebrahimi // All rights reserved.
3*f5c631daSSadaf Ebrahimi //
4*f5c631daSSadaf Ebrahimi // Redistribution and use in source and binary forms, with or without
5*f5c631daSSadaf Ebrahimi // modification, are permitted provided that the following conditions are met:
6*f5c631daSSadaf Ebrahimi //
7*f5c631daSSadaf Ebrahimi // * Redistributions of source code must retain the above copyright notice,
8*f5c631daSSadaf Ebrahimi // this list of conditions and the following disclaimer.
9*f5c631daSSadaf Ebrahimi // * Redistributions in binary form must reproduce the above copyright notice,
10*f5c631daSSadaf Ebrahimi // this list of conditions and the following disclaimer in the documentation
11*f5c631daSSadaf Ebrahimi // and/or other materials provided with the distribution.
12*f5c631daSSadaf Ebrahimi // * Neither the name of ARM Limited nor the names of its contributors may be
13*f5c631daSSadaf Ebrahimi // used to endorse or promote products derived from this software without
14*f5c631daSSadaf Ebrahimi // specific prior written permission.
15*f5c631daSSadaf Ebrahimi //
16*f5c631daSSadaf Ebrahimi // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS CONTRIBUTORS "AS IS" AND
17*f5c631daSSadaf Ebrahimi // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18*f5c631daSSadaf Ebrahimi // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19*f5c631daSSadaf Ebrahimi // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20*f5c631daSSadaf Ebrahimi // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21*f5c631daSSadaf Ebrahimi // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22*f5c631daSSadaf Ebrahimi // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23*f5c631daSSadaf Ebrahimi // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24*f5c631daSSadaf Ebrahimi // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25*f5c631daSSadaf Ebrahimi // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26*f5c631daSSadaf Ebrahimi
27*f5c631daSSadaf Ebrahimi #include "operands-aarch64.h"
28*f5c631daSSadaf Ebrahimi
29*f5c631daSSadaf Ebrahimi namespace vixl {
30*f5c631daSSadaf Ebrahimi namespace aarch64 {
31*f5c631daSSadaf Ebrahimi
32*f5c631daSSadaf Ebrahimi // CPURegList utilities.
PopLowestIndex(RegList mask)33*f5c631daSSadaf Ebrahimi CPURegister CPURegList::PopLowestIndex(RegList mask) {
34*f5c631daSSadaf Ebrahimi RegList list = list_ & mask;
35*f5c631daSSadaf Ebrahimi if (list == 0) return NoCPUReg;
36*f5c631daSSadaf Ebrahimi int index = CountTrailingZeros(list);
37*f5c631daSSadaf Ebrahimi VIXL_ASSERT(((1 << index) & list) != 0);
38*f5c631daSSadaf Ebrahimi Remove(index);
39*f5c631daSSadaf Ebrahimi return CPURegister(index, size_, type_);
40*f5c631daSSadaf Ebrahimi }
41*f5c631daSSadaf Ebrahimi
42*f5c631daSSadaf Ebrahimi
PopHighestIndex(RegList mask)43*f5c631daSSadaf Ebrahimi CPURegister CPURegList::PopHighestIndex(RegList mask) {
44*f5c631daSSadaf Ebrahimi RegList list = list_ & mask;
45*f5c631daSSadaf Ebrahimi if (list == 0) return NoCPUReg;
46*f5c631daSSadaf Ebrahimi int index = CountLeadingZeros(list);
47*f5c631daSSadaf Ebrahimi index = kRegListSizeInBits - 1 - index;
48*f5c631daSSadaf Ebrahimi VIXL_ASSERT(((1 << index) & list) != 0);
49*f5c631daSSadaf Ebrahimi Remove(index);
50*f5c631daSSadaf Ebrahimi return CPURegister(index, size_, type_);
51*f5c631daSSadaf Ebrahimi }
52*f5c631daSSadaf Ebrahimi
53*f5c631daSSadaf Ebrahimi
IsValid() const54*f5c631daSSadaf Ebrahimi bool CPURegList::IsValid() const {
55*f5c631daSSadaf Ebrahimi if (type_ == CPURegister::kNoRegister) {
56*f5c631daSSadaf Ebrahimi // We can't use IsEmpty here because that asserts IsValid().
57*f5c631daSSadaf Ebrahimi return list_ == 0;
58*f5c631daSSadaf Ebrahimi } else {
59*f5c631daSSadaf Ebrahimi bool is_valid = true;
60*f5c631daSSadaf Ebrahimi // Try to create a CPURegister for each element in the list.
61*f5c631daSSadaf Ebrahimi for (int i = 0; i < kRegListSizeInBits; i++) {
62*f5c631daSSadaf Ebrahimi if (((list_ >> i) & 1) != 0) {
63*f5c631daSSadaf Ebrahimi is_valid &= CPURegister(i, size_, type_).IsValid();
64*f5c631daSSadaf Ebrahimi }
65*f5c631daSSadaf Ebrahimi }
66*f5c631daSSadaf Ebrahimi return is_valid;
67*f5c631daSSadaf Ebrahimi }
68*f5c631daSSadaf Ebrahimi }
69*f5c631daSSadaf Ebrahimi
70*f5c631daSSadaf Ebrahimi
RemoveCalleeSaved()71*f5c631daSSadaf Ebrahimi void CPURegList::RemoveCalleeSaved() {
72*f5c631daSSadaf Ebrahimi if (GetType() == CPURegister::kRegister) {
73*f5c631daSSadaf Ebrahimi Remove(GetCalleeSaved(GetRegisterSizeInBits()));
74*f5c631daSSadaf Ebrahimi } else if (GetType() == CPURegister::kVRegister) {
75*f5c631daSSadaf Ebrahimi Remove(GetCalleeSavedV(GetRegisterSizeInBits()));
76*f5c631daSSadaf Ebrahimi } else {
77*f5c631daSSadaf Ebrahimi VIXL_ASSERT(GetType() == CPURegister::kNoRegister);
78*f5c631daSSadaf Ebrahimi VIXL_ASSERT(IsEmpty());
79*f5c631daSSadaf Ebrahimi // The list must already be empty, so do nothing.
80*f5c631daSSadaf Ebrahimi }
81*f5c631daSSadaf Ebrahimi }
82*f5c631daSSadaf Ebrahimi
83*f5c631daSSadaf Ebrahimi
// Three-way union, built from the two-list overload.
CPURegList CPURegList::Union(const CPURegList& list_1,
                             const CPURegList& list_2,
                             const CPURegList& list_3) {
  CPURegList merged = Union(list_2, list_3);
  return Union(list_1, merged);
}
89*f5c631daSSadaf Ebrahimi
90*f5c631daSSadaf Ebrahimi
// Four-way union: merge pairwise, then combine the two partial results.
CPURegList CPURegList::Union(const CPURegList& list_1,
                             const CPURegList& list_2,
                             const CPURegList& list_3,
                             const CPURegList& list_4) {
  CPURegList left = Union(list_1, list_2);
  CPURegList right = Union(list_3, list_4);
  return Union(left, right);
}
97*f5c631daSSadaf Ebrahimi
98*f5c631daSSadaf Ebrahimi
// Three-way intersection, built from the two-list overload.
CPURegList CPURegList::Intersection(const CPURegList& list_1,
                                    const CPURegList& list_2,
                                    const CPURegList& list_3) {
  CPURegList common = Intersection(list_2, list_3);
  return Intersection(list_1, common);
}
104*f5c631daSSadaf Ebrahimi
105*f5c631daSSadaf Ebrahimi
// Four-way intersection: intersect pairwise, then combine the partial results.
CPURegList CPURegList::Intersection(const CPURegList& list_1,
                                    const CPURegList& list_2,
                                    const CPURegList& list_3,
                                    const CPURegList& list_4) {
  CPURegList left = Intersection(list_1, list_2);
  CPURegList right = Intersection(list_3, list_4);
  return Intersection(left, right);
}
113*f5c631daSSadaf Ebrahimi
114*f5c631daSSadaf Ebrahimi
// Registers x19-x29 are callee-saved.
CPURegList CPURegList::GetCalleeSaved(unsigned size) {
  return CPURegList(CPURegister::kRegister, size, 19, 29);
}
118*f5c631daSSadaf Ebrahimi
119*f5c631daSSadaf Ebrahimi
// Registers d8-d15 are callee-saved.
CPURegList CPURegList::GetCalleeSavedV(unsigned size) {
  return CPURegList(CPURegister::kVRegister, size, 8, 15);
}
123*f5c631daSSadaf Ebrahimi
124*f5c631daSSadaf Ebrahimi
CPURegList CPURegList::GetCallerSaved(unsigned size) {
  // Registers x0-x18 and lr (x30) are caller-saved.
  CPURegList list = CPURegList(CPURegister::kRegister, size, 0, 18);
  // Do not use lr directly to avoid initialisation order fiasco bugs for users.
  // (The `lr` global may not be constructed yet when this runs during static
  // initialisation, so build x30 from its index instead.)
  list.Combine(Register(30, kXRegSize));
  return list;
}
132*f5c631daSSadaf Ebrahimi
133*f5c631daSSadaf Ebrahimi
CPURegList CPURegList::GetCallerSavedV(unsigned size) {
  // Registers d0-d7 and d16-d31 are caller-saved. The two disjoint ranges
  // are built separately and then combined.
  CPURegList list = CPURegList(CPURegister::kVRegister, size, 0, 7);
  list.Combine(CPURegList(CPURegister::kVRegister, size, 16, 31));
  return list;
}
140*f5c631daSSadaf Ebrahimi
141*f5c631daSSadaf Ebrahimi
// Pre-built convenience lists, constructed once at static-initialisation time.
const CPURegList kCalleeSaved = CPURegList::GetCalleeSaved();
const CPURegList kCalleeSavedV = CPURegList::GetCalleeSavedV();
const CPURegList kCallerSaved = CPURegList::GetCallerSaved();
const CPURegList kCallerSavedV = CPURegList::GetCallerSavedV();
146*f5c631daSSadaf Ebrahimi
147*f5c631daSSadaf Ebrahimi // Operand.
// Immediate operand: holds no register, no shift and no extend.
Operand::Operand(int64_t immediate)
    : immediate_(immediate),
      reg_(NoReg),
      shift_(NO_SHIFT),
      extend_(NO_EXTEND),
      shift_amount_(0) {}
154*f5c631daSSadaf Ebrahimi
// Immediate operand from an IntegerOperand, stored as its 64-bit value
// (via AsIntN(64)).
Operand::Operand(IntegerOperand immediate)
    : immediate_(immediate.AsIntN(64)),
      reg_(NoReg),
      shift_(NO_SHIFT),
      extend_(NO_EXTEND),
      shift_amount_(0) {}
161*f5c631daSSadaf Ebrahimi
// Shifted-register operand, e.g. `Operand(x0, LSL, 2)`.
Operand::Operand(Register reg, Shift shift, unsigned shift_amount)
    : reg_(reg),
      shift_(shift),
      extend_(NO_EXTEND),
      shift_amount_(shift_amount) {
  // MSL is not a valid shift for a register operand.
  VIXL_ASSERT(shift != MSL);
  // The shift amount must be strictly smaller than the register width.
  VIXL_ASSERT(reg.Is64Bits() || (shift_amount < kWRegSize));
  VIXL_ASSERT(reg.Is32Bits() || (shift_amount < kXRegSize));
  VIXL_ASSERT(!reg.IsSP());
}
172*f5c631daSSadaf Ebrahimi
173*f5c631daSSadaf Ebrahimi
// Extended-register operand, e.g. `Operand(w0, UXTW, 1)`.
Operand::Operand(Register reg, Extend extend, unsigned shift_amount)
    : reg_(reg),
      shift_(NO_SHIFT),
      extend_(extend),
      shift_amount_(shift_amount) {
  VIXL_ASSERT(reg.IsValid());
  // Extended-register forms allow a left shift of at most 4.
  VIXL_ASSERT(shift_amount <= 4);
  VIXL_ASSERT(!reg.IsSP());

  // Extend modes SXTX and UXTX require a 64-bit register.
  VIXL_ASSERT(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));
}
186*f5c631daSSadaf Ebrahimi
187*f5c631daSSadaf Ebrahimi
IsImmediate() const188*f5c631daSSadaf Ebrahimi bool Operand::IsImmediate() const { return reg_.Is(NoReg); }
189*f5c631daSSadaf Ebrahimi
190*f5c631daSSadaf Ebrahimi
// True when this operand is exactly equivalent to its register with no
// modifier applied: either no shift/extend at all, or a modifier that is a
// no-op in every context.
bool Operand::IsPlainRegister() const {
  return reg_.IsValid() &&
         (((shift_ == NO_SHIFT) && (extend_ == NO_EXTEND)) ||
          // No-op shifts.
          ((shift_ != NO_SHIFT) && (shift_amount_ == 0)) ||
          // No-op extend operations.
          // We can't include [US]XTW here without knowing more about the
          // context; they are only no-ops for 32-bit operations.
          //
          // For example, this operand could be replaced with w1:
          //   __ Add(w0, w0, Operand(w1, UXTW));
          // However, no plain register can replace it in this context:
          //   __ Add(x0, x0, Operand(w1, UXTW));
          (((extend_ == UXTX) || (extend_ == SXTX)) && (shift_amount_ == 0)));
}
206*f5c631daSSadaf Ebrahimi
207*f5c631daSSadaf Ebrahimi
// True for register operands carrying a shift modifier (LSL, LSR, ...).
bool Operand::IsShiftedRegister() const {
  return reg_.IsValid() && (shift_ != NO_SHIFT);
}
211*f5c631daSSadaf Ebrahimi
212*f5c631daSSadaf Ebrahimi
// True for register operands carrying an extend modifier (UXTW, SXTX, ...).
bool Operand::IsExtendedRegister() const {
  return reg_.IsValid() && (extend_ != NO_EXTEND);
}
216*f5c631daSSadaf Ebrahimi
217*f5c631daSSadaf Ebrahimi
IsZero() const218*f5c631daSSadaf Ebrahimi bool Operand::IsZero() const {
219*f5c631daSSadaf Ebrahimi if (IsImmediate()) {
220*f5c631daSSadaf Ebrahimi return GetImmediate() == 0;
221*f5c631daSSadaf Ebrahimi } else {
222*f5c631daSSadaf Ebrahimi return GetRegister().IsZero();
223*f5c631daSSadaf Ebrahimi }
224*f5c631daSSadaf Ebrahimi }
225*f5c631daSSadaf Ebrahimi
226*f5c631daSSadaf Ebrahimi
// Convert a shifted-register operand (LSL #<0-4> only) to the equivalent
// extended-register form, preserving the shift amount. UXTX is used for
// 64-bit registers and UXTW for 32-bit ones.
Operand Operand::ToExtendedRegister() const {
  VIXL_ASSERT(IsShiftedRegister());
  VIXL_ASSERT((shift_ == LSL) && (shift_amount_ <= 4));
  return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);
}
232*f5c631daSSadaf Ebrahimi
233*f5c631daSSadaf Ebrahimi
234*f5c631daSSadaf Ebrahimi // MemOperand
MemOperand()235*f5c631daSSadaf Ebrahimi MemOperand::MemOperand()
236*f5c631daSSadaf Ebrahimi : base_(NoReg),
237*f5c631daSSadaf Ebrahimi regoffset_(NoReg),
238*f5c631daSSadaf Ebrahimi offset_(0),
239*f5c631daSSadaf Ebrahimi addrmode_(Offset),
240*f5c631daSSadaf Ebrahimi shift_(NO_SHIFT),
241*f5c631daSSadaf Ebrahimi extend_(NO_EXTEND) {}
242*f5c631daSSadaf Ebrahimi
243*f5c631daSSadaf Ebrahimi
// Immediate-offset form, e.g. `[base, #imm]`, `[base, #imm]!`, `[base], #imm`
// depending on `addrmode`.
MemOperand::MemOperand(Register base, int64_t offset, AddrMode addrmode)
    : base_(base),
      regoffset_(NoReg),
      offset_(offset),
      addrmode_(addrmode),
      shift_(NO_SHIFT),
      extend_(NO_EXTEND),
      shift_amount_(0) {
  // The base must be a 64-bit register other than xzr (sp is allowed).
  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());
}
254*f5c631daSSadaf Ebrahimi
255*f5c631daSSadaf Ebrahimi
// Extended-register-offset form, e.g. `[base, wN, UXTW #s]`.
MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Extend extend,
                       unsigned shift_amount)
    : base_(base),
      regoffset_(regoffset),
      offset_(0),
      addrmode_(Offset),
      shift_(NO_SHIFT),
      extend_(extend),
      shift_amount_(shift_amount) {
  // The base must be a 64-bit register other than xzr (sp is allowed).
  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());
  VIXL_ASSERT(!regoffset.IsSP());
  // Only these extend modes are encodable for register offsets.
  VIXL_ASSERT((extend == UXTW) || (extend == SXTW) || (extend == SXTX));

  // SXTX extend mode requires a 64-bit offset register.
  VIXL_ASSERT(regoffset.Is64Bits() || (extend != SXTX));
}
274*f5c631daSSadaf Ebrahimi
275*f5c631daSSadaf Ebrahimi
// Shifted-register-offset form, e.g. `[base, xN, LSL #s]`.
MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Shift shift,
                       unsigned shift_amount)
    : base_(base),
      regoffset_(regoffset),
      offset_(0),
      addrmode_(Offset),
      shift_(shift),
      extend_(NO_EXTEND),
      shift_amount_(shift_amount) {
  // The base must be a 64-bit register other than xzr (sp is allowed).
  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());
  // A shifted offset register must be a 64-bit register other than sp.
  VIXL_ASSERT(regoffset.Is64Bits() && !regoffset.IsSP());
  // LSL is the only shift encodable for register offsets.
  VIXL_ASSERT(shift == LSL);
}
291*f5c631daSSadaf Ebrahimi
292*f5c631daSSadaf Ebrahimi
// Build a MemOperand from a generic Operand offset. The Operand may be an
// immediate, a shifted register or an extended register; the resulting
// MemOperand matches what the corresponding dedicated constructor would
// have produced, and the same invariants are re-asserted for each case.
MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode)
    : base_(base),
      regoffset_(NoReg),
      addrmode_(addrmode),
      shift_(NO_SHIFT),
      extend_(NO_EXTEND),
      shift_amount_(0) {
  // The base must be a 64-bit register other than xzr (sp is allowed).
  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());

  if (offset.IsImmediate()) {
    offset_ = offset.GetImmediate();
  } else if (offset.IsShiftedRegister()) {
    VIXL_ASSERT((addrmode == Offset) || (addrmode == PostIndex));

    regoffset_ = offset.GetRegister();
    shift_ = offset.GetShift();
    shift_amount_ = offset.GetShiftAmount();

    extend_ = NO_EXTEND;
    offset_ = 0;

    // These assertions match those in the shifted-register constructor.
    VIXL_ASSERT(regoffset_.Is64Bits() && !regoffset_.IsSP());
    VIXL_ASSERT(shift_ == LSL);
  } else {
    VIXL_ASSERT(offset.IsExtendedRegister());
    VIXL_ASSERT(addrmode == Offset);

    regoffset_ = offset.GetRegister();
    extend_ = offset.GetExtend();
    shift_amount_ = offset.GetShiftAmount();

    shift_ = NO_SHIFT;
    offset_ = 0;

    // These assertions match those in the extended-register constructor.
    VIXL_ASSERT(!regoffset_.IsSP());
    VIXL_ASSERT((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX));
    VIXL_ASSERT((regoffset_.Is64Bits() || (extend_ != SXTX)));
  }
}
334*f5c631daSSadaf Ebrahimi
335*f5c631daSSadaf Ebrahimi
// True for `[base]`: an immediate-offset form whose offset is exactly zero.
bool MemOperand::IsPlainRegister() const {
  return IsImmediateOffset() && (GetOffset() == 0);
}
339*f5c631daSSadaf Ebrahimi
340*f5c631daSSadaf Ebrahimi
IsEquivalentToPlainRegister() const341*f5c631daSSadaf Ebrahimi bool MemOperand::IsEquivalentToPlainRegister() const {
342*f5c631daSSadaf Ebrahimi if (regoffset_.Is(NoReg)) {
343*f5c631daSSadaf Ebrahimi // Immediate offset, pre-index or post-index.
344*f5c631daSSadaf Ebrahimi return GetOffset() == 0;
345*f5c631daSSadaf Ebrahimi } else if (GetRegisterOffset().IsZero()) {
346*f5c631daSSadaf Ebrahimi // Zero register offset, pre-index or post-index.
347*f5c631daSSadaf Ebrahimi // We can ignore shift and extend options because they all result in zero.
348*f5c631daSSadaf Ebrahimi return true;
349*f5c631daSSadaf Ebrahimi }
350*f5c631daSSadaf Ebrahimi return false;
351*f5c631daSSadaf Ebrahimi }
352*f5c631daSSadaf Ebrahimi
353*f5c631daSSadaf Ebrahimi
// True for `[base, #imm]` forms (no writeback, no register offset).
bool MemOperand::IsImmediateOffset() const {
  return (addrmode_ == Offset) && regoffset_.Is(NoReg);
}
357*f5c631daSSadaf Ebrahimi
358*f5c631daSSadaf Ebrahimi
// True for `[base, reg{, mod}]` forms (no writeback, register offset).
bool MemOperand::IsRegisterOffset() const {
  return (addrmode_ == Offset) && !regoffset_.Is(NoReg);
}
362*f5c631daSSadaf Ebrahimi
// Writeback addressing modes: `[base, ...]!` and `[base], ...` respectively.
bool MemOperand::IsPreIndex() const { return addrmode_ == PreIndex; }
bool MemOperand::IsPostIndex() const { return addrmode_ == PostIndex; }
365*f5c631daSSadaf Ebrahimi
// True for `[base, #imm]!`: pre-index with an immediate (not register) offset.
bool MemOperand::IsImmediatePreIndex() const {
  return IsPreIndex() && regoffset_.Is(NoReg);
}
369*f5c631daSSadaf Ebrahimi
// True for `[base], #imm`: post-index with an immediate (not register) offset.
bool MemOperand::IsImmediatePostIndex() const {
  return IsPostIndex() && regoffset_.Is(NoReg);
}
373*f5c631daSSadaf Ebrahimi
// Adjust the immediate offset in place. Only valid for plain immediate-offset
// forms (no writeback, no register offset).
void MemOperand::AddOffset(int64_t offset) {
  VIXL_ASSERT(IsImmediateOffset());
  offset_ += offset;
}
378*f5c631daSSadaf Ebrahimi
379*f5c631daSSadaf Ebrahimi
// An SVEMemOperand is valid when it matches exactly one of the supported
// addressing forms and its modifier/offset combination is encodable.
bool SVEMemOperand::IsValid() const {
#ifdef VIXL_DEBUG
  {
    // It should not be possible for an SVEMemOperand to match multiple types.
    int count = 0;
    if (IsScalarPlusImmediate()) count++;
    if (IsScalarPlusScalar()) count++;
    if (IsScalarPlusVector()) count++;
    if (IsVectorPlusImmediate()) count++;
    if (IsVectorPlusScalar()) count++;
    if (IsVectorPlusVector()) count++;
    VIXL_ASSERT(count <= 1);
  }
#endif

  // We can't have a register _and_ an immediate offset.
  if ((offset_ != 0) && (!regoffset_.IsNone())) return false;

  if (shift_amount_ != 0) {
    // Only shift and extend modifiers can take a shift amount.
    switch (mod_) {
      case NO_SVE_OFFSET_MODIFIER:
      case SVE_MUL_VL:
        // A multiplier or absent modifier cannot carry a shift amount.
        return false;
      case SVE_LSL:
      case SVE_UXTW:
      case SVE_SXTW:
        // Fall through.
        break;
    }
  }

  // Finally, the operand must match one of the recognised forms.
  return IsScalarPlusImmediate() || IsScalarPlusScalar() ||
         IsScalarPlusVector() || IsVectorPlusImmediate() ||
         IsVectorPlusScalar() || IsVectorPlusVector();
}
416*f5c631daSSadaf Ebrahimi
417*f5c631daSSadaf Ebrahimi
IsEquivalentToScalar() const418*f5c631daSSadaf Ebrahimi bool SVEMemOperand::IsEquivalentToScalar() const {
419*f5c631daSSadaf Ebrahimi if (IsScalarPlusImmediate()) {
420*f5c631daSSadaf Ebrahimi return GetImmediateOffset() == 0;
421*f5c631daSSadaf Ebrahimi }
422*f5c631daSSadaf Ebrahimi if (IsScalarPlusScalar()) {
423*f5c631daSSadaf Ebrahimi // We can ignore the shift because it will still result in zero.
424*f5c631daSSadaf Ebrahimi return GetScalarOffset().IsZero();
425*f5c631daSSadaf Ebrahimi }
426*f5c631daSSadaf Ebrahimi // Forms involving vectors are never equivalent to a single scalar.
427*f5c631daSSadaf Ebrahimi return false;
428*f5c631daSSadaf Ebrahimi }
429*f5c631daSSadaf Ebrahimi
IsPlainRegister() const430*f5c631daSSadaf Ebrahimi bool SVEMemOperand::IsPlainRegister() const {
431*f5c631daSSadaf Ebrahimi if (IsScalarPlusImmediate()) {
432*f5c631daSSadaf Ebrahimi return GetImmediateOffset() == 0;
433*f5c631daSSadaf Ebrahimi }
434*f5c631daSSadaf Ebrahimi return false;
435*f5c631daSSadaf Ebrahimi }
436*f5c631daSSadaf Ebrahimi
// Wrap a CPU register as a generic operand. Q registers (larger than X
// registers) are not supported.
GenericOperand::GenericOperand(const CPURegister& reg)
    : cpu_register_(reg), mem_op_size_(0) {
  if (reg.IsQ()) {
    VIXL_ASSERT(reg.GetSizeInBits() > static_cast<int>(kXRegSize));
    // Support for Q registers is not implemented yet.
    VIXL_UNIMPLEMENTED();
  }
}
445*f5c631daSSadaf Ebrahimi
446*f5c631daSSadaf Ebrahimi
// Wrap a memory location (address plus access size in bytes) as a generic
// operand. Sizes larger than an X register are not supported.
GenericOperand::GenericOperand(const MemOperand& mem_op, size_t mem_op_size)
    : cpu_register_(NoReg), mem_op_(mem_op), mem_op_size_(mem_op_size) {
  if (mem_op_size_ > kXRegSizeInBytes) {
    // We only support generic operands up to the size of X registers.
    VIXL_UNIMPLEMENTED();
  }
}
454*f5c631daSSadaf Ebrahimi
Equals(const GenericOperand & other) const455*f5c631daSSadaf Ebrahimi bool GenericOperand::Equals(const GenericOperand& other) const {
456*f5c631daSSadaf Ebrahimi if (!IsValid() || !other.IsValid()) {
457*f5c631daSSadaf Ebrahimi // Two invalid generic operands are considered equal.
458*f5c631daSSadaf Ebrahimi return !IsValid() && !other.IsValid();
459*f5c631daSSadaf Ebrahimi }
460*f5c631daSSadaf Ebrahimi if (IsCPURegister() && other.IsCPURegister()) {
461*f5c631daSSadaf Ebrahimi return GetCPURegister().Is(other.GetCPURegister());
462*f5c631daSSadaf Ebrahimi } else if (IsMemOperand() && other.IsMemOperand()) {
463*f5c631daSSadaf Ebrahimi return GetMemOperand().Equals(other.GetMemOperand()) &&
464*f5c631daSSadaf Ebrahimi (GetMemOperandSizeInBytes() == other.GetMemOperandSizeInBytes());
465*f5c631daSSadaf Ebrahimi }
466*f5c631daSSadaf Ebrahimi return false;
467*f5c631daSSadaf Ebrahimi }
468*f5c631daSSadaf Ebrahimi }
469*f5c631daSSadaf Ebrahimi } // namespace vixl::aarch64
470