1*f5c631daSSadaf Ebrahimi // Copyright 2017, VIXL authors
2*f5c631daSSadaf Ebrahimi // All rights reserved.
3*f5c631daSSadaf Ebrahimi //
4*f5c631daSSadaf Ebrahimi // Redistribution and use in source and binary forms, with or without
5*f5c631daSSadaf Ebrahimi // modification, are permitted provided that the following conditions are met:
6*f5c631daSSadaf Ebrahimi //
7*f5c631daSSadaf Ebrahimi // * Redistributions of source code must retain the above copyright notice,
8*f5c631daSSadaf Ebrahimi // this list of conditions and the following disclaimer.
9*f5c631daSSadaf Ebrahimi // * Redistributions in binary form must reproduce the above copyright notice,
10*f5c631daSSadaf Ebrahimi // this list of conditions and the following disclaimer in the documentation
11*f5c631daSSadaf Ebrahimi // and/or other materials provided with the distribution.
12*f5c631daSSadaf Ebrahimi // * Neither the name of ARM Limited nor the names of its contributors may be
13*f5c631daSSadaf Ebrahimi // used to endorse or promote products derived from this software without
14*f5c631daSSadaf Ebrahimi // specific prior written permission.
15*f5c631daSSadaf Ebrahimi //
16*f5c631daSSadaf Ebrahimi // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS CONTRIBUTORS "AS IS" AND
17*f5c631daSSadaf Ebrahimi // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18*f5c631daSSadaf Ebrahimi // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19*f5c631daSSadaf Ebrahimi // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20*f5c631daSSadaf Ebrahimi // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21*f5c631daSSadaf Ebrahimi // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22*f5c631daSSadaf Ebrahimi // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23*f5c631daSSadaf Ebrahimi // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24*f5c631daSSadaf Ebrahimi // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25*f5c631daSSadaf Ebrahimi // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26*f5c631daSSadaf Ebrahimi
27*f5c631daSSadaf Ebrahimi #include "location-aarch32.h"
28*f5c631daSSadaf Ebrahimi
29*f5c631daSSadaf Ebrahimi #include "assembler-aarch32.h"
30*f5c631daSSadaf Ebrahimi #include "macro-assembler-aarch32.h"
31*f5c631daSSadaf Ebrahimi
32*f5c631daSSadaf Ebrahimi namespace vixl {
33*f5c631daSSadaf Ebrahimi
34*f5c631daSSadaf Ebrahimi namespace aarch32 {
35*f5c631daSSadaf Ebrahimi
Needs16BitPadding(int32_t location) const36*f5c631daSSadaf Ebrahimi bool Location::Needs16BitPadding(int32_t location) const {
37*f5c631daSSadaf Ebrahimi if (!HasForwardReferences()) return false;
38*f5c631daSSadaf Ebrahimi const ForwardRef& last_ref = GetLastForwardReference();
39*f5c631daSSadaf Ebrahimi int32_t min_location_last_ref = last_ref.GetMinLocation();
40*f5c631daSSadaf Ebrahimi VIXL_ASSERT(min_location_last_ref - location <= 2);
41*f5c631daSSadaf Ebrahimi return (min_location_last_ref > location);
42*f5c631daSSadaf Ebrahimi }
43*f5c631daSSadaf Ebrahimi
ResolveReferences(internal::AssemblerBase * assembler)44*f5c631daSSadaf Ebrahimi void Location::ResolveReferences(internal::AssemblerBase* assembler) {
45*f5c631daSSadaf Ebrahimi // Iterate over references and call EncodeLocationFor on each of them.
46*f5c631daSSadaf Ebrahimi for (ForwardRefListIterator it(this); !it.Done(); it.Advance()) {
47*f5c631daSSadaf Ebrahimi const ForwardRef& reference = *it.Current();
48*f5c631daSSadaf Ebrahimi VIXL_ASSERT(reference.LocationIsEncodable(location_));
49*f5c631daSSadaf Ebrahimi int32_t from = reference.GetLocation();
50*f5c631daSSadaf Ebrahimi EncodeLocationFor(assembler, from, reference.op());
51*f5c631daSSadaf Ebrahimi }
52*f5c631daSSadaf Ebrahimi forward_.clear();
53*f5c631daSSadaf Ebrahimi }
54*f5c631daSSadaf Ebrahimi
Is16BitEncoding(uint16_t instr)55*f5c631daSSadaf Ebrahimi static bool Is16BitEncoding(uint16_t instr) {
56*f5c631daSSadaf Ebrahimi return instr < (kLowestT32_32Opcode >> 16);
57*f5c631daSSadaf Ebrahimi }
58*f5c631daSSadaf Ebrahimi
EncodeLocationFor(internal::AssemblerBase * assembler,int32_t from,const Location::EmitOperator * encoder)59*f5c631daSSadaf Ebrahimi void Location::EncodeLocationFor(internal::AssemblerBase* assembler,
60*f5c631daSSadaf Ebrahimi int32_t from,
61*f5c631daSSadaf Ebrahimi const Location::EmitOperator* encoder) {
62*f5c631daSSadaf Ebrahimi if (encoder->IsUsingT32()) {
63*f5c631daSSadaf Ebrahimi uint16_t* instr_ptr =
64*f5c631daSSadaf Ebrahimi assembler->GetBuffer()->GetOffsetAddress<uint16_t*>(from);
65*f5c631daSSadaf Ebrahimi if (Is16BitEncoding(instr_ptr[0])) {
66*f5c631daSSadaf Ebrahimi // The Encode methods always deals with uint32_t types so we need
67*f5c631daSSadaf Ebrahimi // to explicitly cast it.
68*f5c631daSSadaf Ebrahimi uint32_t instr = static_cast<uint32_t>(instr_ptr[0]);
69*f5c631daSSadaf Ebrahimi instr = encoder->Encode(instr, from, this);
70*f5c631daSSadaf Ebrahimi // The Encode method should not ever set the top 16 bits.
71*f5c631daSSadaf Ebrahimi VIXL_ASSERT((instr & ~0xffff) == 0);
72*f5c631daSSadaf Ebrahimi instr_ptr[0] = static_cast<uint16_t>(instr);
73*f5c631daSSadaf Ebrahimi } else {
74*f5c631daSSadaf Ebrahimi uint32_t instr =
75*f5c631daSSadaf Ebrahimi instr_ptr[1] | (static_cast<uint32_t>(instr_ptr[0]) << 16);
76*f5c631daSSadaf Ebrahimi instr = encoder->Encode(instr, from, this);
77*f5c631daSSadaf Ebrahimi instr_ptr[0] = static_cast<uint16_t>(instr >> 16);
78*f5c631daSSadaf Ebrahimi instr_ptr[1] = static_cast<uint16_t>(instr);
79*f5c631daSSadaf Ebrahimi }
80*f5c631daSSadaf Ebrahimi } else {
81*f5c631daSSadaf Ebrahimi uint32_t* instr_ptr =
82*f5c631daSSadaf Ebrahimi assembler->GetBuffer()->GetOffsetAddress<uint32_t*>(from);
83*f5c631daSSadaf Ebrahimi instr_ptr[0] = encoder->Encode(instr_ptr[0], from, this);
84*f5c631daSSadaf Ebrahimi }
85*f5c631daSSadaf Ebrahimi }
86*f5c631daSSadaf Ebrahimi
AddForwardRef(int32_t instr_location,const EmitOperator & op,const ReferenceInfo * info)87*f5c631daSSadaf Ebrahimi void Location::AddForwardRef(int32_t instr_location,
88*f5c631daSSadaf Ebrahimi const EmitOperator& op,
89*f5c631daSSadaf Ebrahimi const ReferenceInfo* info) {
90*f5c631daSSadaf Ebrahimi VIXL_ASSERT(referenced_);
91*f5c631daSSadaf Ebrahimi int32_t from = instr_location + (op.IsUsingT32() ? kT32PcDelta : kA32PcDelta);
92*f5c631daSSadaf Ebrahimi if (info->pc_needs_aligning == ReferenceInfo::kAlignPc)
93*f5c631daSSadaf Ebrahimi from = AlignDown(from, 4);
94*f5c631daSSadaf Ebrahimi int32_t min_object_location = from + info->min_offset;
95*f5c631daSSadaf Ebrahimi int32_t max_object_location = from + info->max_offset;
96*f5c631daSSadaf Ebrahimi forward_.insert(ForwardRef(&op,
97*f5c631daSSadaf Ebrahimi instr_location,
98*f5c631daSSadaf Ebrahimi info->size,
99*f5c631daSSadaf Ebrahimi min_object_location,
100*f5c631daSSadaf Ebrahimi max_object_location,
101*f5c631daSSadaf Ebrahimi info->alignment));
102*f5c631daSSadaf Ebrahimi }
103*f5c631daSSadaf Ebrahimi
GetMaxAlignment() const104*f5c631daSSadaf Ebrahimi int Location::GetMaxAlignment() const {
105*f5c631daSSadaf Ebrahimi int max_alignment = GetPoolObjectAlignment();
106*f5c631daSSadaf Ebrahimi for (ForwardRefListIterator it(const_cast<Location*>(this)); !it.Done();
107*f5c631daSSadaf Ebrahimi it.Advance()) {
108*f5c631daSSadaf Ebrahimi const ForwardRef& reference = *it.Current();
109*f5c631daSSadaf Ebrahimi if (reference.GetAlignment() > max_alignment)
110*f5c631daSSadaf Ebrahimi max_alignment = reference.GetAlignment();
111*f5c631daSSadaf Ebrahimi }
112*f5c631daSSadaf Ebrahimi return max_alignment;
113*f5c631daSSadaf Ebrahimi }
114*f5c631daSSadaf Ebrahimi
GetMinLocation() const115*f5c631daSSadaf Ebrahimi int Location::GetMinLocation() const {
116*f5c631daSSadaf Ebrahimi int32_t min_location = 0;
117*f5c631daSSadaf Ebrahimi for (ForwardRefListIterator it(const_cast<Location*>(this)); !it.Done();
118*f5c631daSSadaf Ebrahimi it.Advance()) {
119*f5c631daSSadaf Ebrahimi const ForwardRef& reference = *it.Current();
120*f5c631daSSadaf Ebrahimi if (reference.GetMinLocation() > min_location)
121*f5c631daSSadaf Ebrahimi min_location = reference.GetMinLocation();
122*f5c631daSSadaf Ebrahimi }
123*f5c631daSSadaf Ebrahimi return min_location;
124*f5c631daSSadaf Ebrahimi }
125*f5c631daSSadaf Ebrahimi
UpdatePoolObject(PoolObject<int32_t> * object)126*f5c631daSSadaf Ebrahimi void Label::UpdatePoolObject(PoolObject<int32_t>* object) {
127*f5c631daSSadaf Ebrahimi VIXL_ASSERT(forward_.size() == 1);
128*f5c631daSSadaf Ebrahimi const ForwardRef& reference = forward_.Front();
129*f5c631daSSadaf Ebrahimi object->Update(reference.GetMinLocation(),
130*f5c631daSSadaf Ebrahimi reference.GetMaxLocation(),
131*f5c631daSSadaf Ebrahimi reference.GetAlignment());
132*f5c631daSSadaf Ebrahimi }
133*f5c631daSSadaf Ebrahimi
// Emits this label's pool entry: a veneer branch (`b` to this label) so the
// original out-of-range branch can reach its target in two hops.
void Label::EmitPoolObject(MacroAssemblerInterface* masm) {
  MacroAssembler* macro_assembler = static_cast<MacroAssembler*>(masm);

  // Add a new branch to this label.
  macro_assembler->GetBuffer()->EnsureSpaceFor(kMaxInstructionSizeInBytes);
  // The scope guard suppresses pool checks while the branch is emitted:
  // we are already emitting a pool, so re-entering pool management here
  // would be incorrect. It must stay alive until after the b() below.
  ExactAssemblyScopeWithoutPoolsCheck guard(macro_assembler,
                                            kMaxInstructionSizeInBytes,
                                            ExactAssemblyScope::kMaximumSize);
  macro_assembler->b(this);
}
144*f5c631daSSadaf Ebrahimi
EmitPoolObject(MacroAssemblerInterface * masm)145*f5c631daSSadaf Ebrahimi void RawLiteral::EmitPoolObject(MacroAssemblerInterface* masm) {
146*f5c631daSSadaf Ebrahimi Assembler* assembler = static_cast<Assembler*>(masm->AsAssemblerBase());
147*f5c631daSSadaf Ebrahimi
148*f5c631daSSadaf Ebrahimi assembler->GetBuffer()->EnsureSpaceFor(GetSize());
149*f5c631daSSadaf Ebrahimi assembler->GetBuffer()->EmitData(GetDataAddress(), GetSize());
150*f5c631daSSadaf Ebrahimi }
151*f5c631daSSadaf Ebrahimi }
152*f5c631daSSadaf Ebrahimi }
153