// xref: /aosp_15_r20/external/vixl/src/code-buffer-vixl.cc (revision f5c631da2f1efdd72b5fd1e20510e4042af13d77)
// Copyright 2017, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

extern "C" {
#include <sys/mman.h>
}

#include "code-buffer-vixl.h"
#include "utils-vixl.h"

namespace vixl {

// BSD uses `MAP_ANON` instead of the Linux `MAP_ANONYMOUS`. The `MAP_ANONYMOUS`
// alias should generally be available, but is not always, so define it manually
// if necessary.
#if !defined(MAP_ANONYMOUS) && defined(MAP_ANON)
#define MAP_ANONYMOUS MAP_ANON
#endif

CodeBuffer(size_t capacity)43*f5c631daSSadaf Ebrahimi CodeBuffer::CodeBuffer(size_t capacity)
44*f5c631daSSadaf Ebrahimi     : buffer_(NULL),
45*f5c631daSSadaf Ebrahimi       managed_(true),
46*f5c631daSSadaf Ebrahimi       cursor_(NULL),
47*f5c631daSSadaf Ebrahimi       dirty_(false),
48*f5c631daSSadaf Ebrahimi       capacity_(capacity) {
49*f5c631daSSadaf Ebrahimi   if (capacity_ == 0) {
50*f5c631daSSadaf Ebrahimi     return;
51*f5c631daSSadaf Ebrahimi   }
52*f5c631daSSadaf Ebrahimi #ifdef VIXL_CODE_BUFFER_MALLOC
53*f5c631daSSadaf Ebrahimi   buffer_ = reinterpret_cast<byte*>(malloc(capacity_));
54*f5c631daSSadaf Ebrahimi #elif defined(VIXL_CODE_BUFFER_MMAP)
55*f5c631daSSadaf Ebrahimi   buffer_ = reinterpret_cast<byte*>(mmap(NULL,
56*f5c631daSSadaf Ebrahimi                                          capacity,
57*f5c631daSSadaf Ebrahimi                                          PROT_READ | PROT_WRITE,
58*f5c631daSSadaf Ebrahimi                                          MAP_PRIVATE | MAP_ANONYMOUS,
59*f5c631daSSadaf Ebrahimi                                          -1,
60*f5c631daSSadaf Ebrahimi                                          0));
61*f5c631daSSadaf Ebrahimi #else
62*f5c631daSSadaf Ebrahimi #error Unknown code buffer allocator.
63*f5c631daSSadaf Ebrahimi #endif
64*f5c631daSSadaf Ebrahimi   VIXL_CHECK(buffer_ != NULL);
65*f5c631daSSadaf Ebrahimi   // Aarch64 instructions must be word aligned, we assert the default allocator
66*f5c631daSSadaf Ebrahimi   // always returns word align memory.
67*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(IsWordAligned(buffer_));
68*f5c631daSSadaf Ebrahimi 
69*f5c631daSSadaf Ebrahimi   cursor_ = buffer_;
70*f5c631daSSadaf Ebrahimi }
71*f5c631daSSadaf Ebrahimi 
72*f5c631daSSadaf Ebrahimi 
CodeBuffer(byte * buffer,size_t capacity)73*f5c631daSSadaf Ebrahimi CodeBuffer::CodeBuffer(byte* buffer, size_t capacity)
74*f5c631daSSadaf Ebrahimi     : buffer_(reinterpret_cast<byte*>(buffer)),
75*f5c631daSSadaf Ebrahimi       managed_(false),
76*f5c631daSSadaf Ebrahimi       cursor_(reinterpret_cast<byte*>(buffer)),
77*f5c631daSSadaf Ebrahimi       dirty_(false),
78*f5c631daSSadaf Ebrahimi       capacity_(capacity) {
79*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(buffer_ != NULL);
80*f5c631daSSadaf Ebrahimi }
81*f5c631daSSadaf Ebrahimi 
82*f5c631daSSadaf Ebrahimi 
// Release the underlying storage if this CodeBuffer allocated it (managed_).
// The buffer must be clean on destruction (see SetClean()/Reset()); the
// assertion enforces that no un-finalized code is silently discarded.
CodeBuffer::~CodeBuffer() VIXL_NEGATIVE_TESTING_ALLOW_EXCEPTION {
  VIXL_ASSERT(!IsDirty());
  if (managed_) {
#ifdef VIXL_CODE_BUFFER_MALLOC
    free(buffer_);
#elif defined(VIXL_CODE_BUFFER_MMAP)
    munmap(buffer_, capacity_);
#else
#error Unknown code buffer allocator.
#endif
  }
}

SetExecutable()97*f5c631daSSadaf Ebrahimi void CodeBuffer::SetExecutable() {
98*f5c631daSSadaf Ebrahimi #ifdef VIXL_CODE_BUFFER_MMAP
99*f5c631daSSadaf Ebrahimi   int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_EXEC);
100*f5c631daSSadaf Ebrahimi   VIXL_CHECK(ret == 0);
101*f5c631daSSadaf Ebrahimi #else
102*f5c631daSSadaf Ebrahimi   // This requires page-aligned memory blocks, which we can only guarantee with
103*f5c631daSSadaf Ebrahimi   // mmap.
104*f5c631daSSadaf Ebrahimi   VIXL_UNIMPLEMENTED();
105*f5c631daSSadaf Ebrahimi #endif
106*f5c631daSSadaf Ebrahimi }
107*f5c631daSSadaf Ebrahimi 
108*f5c631daSSadaf Ebrahimi 
SetWritable()109*f5c631daSSadaf Ebrahimi void CodeBuffer::SetWritable() {
110*f5c631daSSadaf Ebrahimi #ifdef VIXL_CODE_BUFFER_MMAP
111*f5c631daSSadaf Ebrahimi   int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_WRITE);
112*f5c631daSSadaf Ebrahimi   VIXL_CHECK(ret == 0);
113*f5c631daSSadaf Ebrahimi #else
114*f5c631daSSadaf Ebrahimi   // This requires page-aligned memory blocks, which we can only guarantee with
115*f5c631daSSadaf Ebrahimi   // mmap.
116*f5c631daSSadaf Ebrahimi   VIXL_UNIMPLEMENTED();
117*f5c631daSSadaf Ebrahimi #endif
118*f5c631daSSadaf Ebrahimi }
119*f5c631daSSadaf Ebrahimi 
120*f5c631daSSadaf Ebrahimi 
EmitString(const char * string)121*f5c631daSSadaf Ebrahimi void CodeBuffer::EmitString(const char* string) {
122*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(HasSpaceFor(strlen(string) + 1));
123*f5c631daSSadaf Ebrahimi   char* dst = reinterpret_cast<char*>(cursor_);
124*f5c631daSSadaf Ebrahimi   dirty_ = true;
125*f5c631daSSadaf Ebrahimi   char* null_char = stpcpy(dst, string);
126*f5c631daSSadaf Ebrahimi   cursor_ = reinterpret_cast<byte*>(null_char) + 1;
127*f5c631daSSadaf Ebrahimi }
128*f5c631daSSadaf Ebrahimi 
129*f5c631daSSadaf Ebrahimi 
EmitData(const void * data,size_t size)130*f5c631daSSadaf Ebrahimi void CodeBuffer::EmitData(const void* data, size_t size) {
131*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(HasSpaceFor(size));
132*f5c631daSSadaf Ebrahimi   dirty_ = true;
133*f5c631daSSadaf Ebrahimi   memcpy(cursor_, data, size);
134*f5c631daSSadaf Ebrahimi   cursor_ = cursor_ + size;
135*f5c631daSSadaf Ebrahimi }
136*f5c631daSSadaf Ebrahimi 
137*f5c631daSSadaf Ebrahimi 
UpdateData(size_t offset,const void * data,size_t size)138*f5c631daSSadaf Ebrahimi void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) {
139*f5c631daSSadaf Ebrahimi   dirty_ = true;
140*f5c631daSSadaf Ebrahimi   byte* dst = buffer_ + offset;
141*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(dst + size <= cursor_);
142*f5c631daSSadaf Ebrahimi   memcpy(dst, data, size);
143*f5c631daSSadaf Ebrahimi }
144*f5c631daSSadaf Ebrahimi 
145*f5c631daSSadaf Ebrahimi 
Align()146*f5c631daSSadaf Ebrahimi void CodeBuffer::Align() {
147*f5c631daSSadaf Ebrahimi   byte* end = AlignUp(cursor_, 4);
148*f5c631daSSadaf Ebrahimi   const size_t padding_size = end - cursor_;
149*f5c631daSSadaf Ebrahimi   VIXL_ASSERT(padding_size <= 4);
150*f5c631daSSadaf Ebrahimi   EmitZeroedBytes(static_cast<int>(padding_size));
151*f5c631daSSadaf Ebrahimi }
152*f5c631daSSadaf Ebrahimi 
EmitZeroedBytes(int n)153*f5c631daSSadaf Ebrahimi void CodeBuffer::EmitZeroedBytes(int n) {
154*f5c631daSSadaf Ebrahimi   EnsureSpaceFor(n);
155*f5c631daSSadaf Ebrahimi   dirty_ = true;
156*f5c631daSSadaf Ebrahimi   memset(cursor_, 0, n);
157*f5c631daSSadaf Ebrahimi   cursor_ += n;
158*f5c631daSSadaf Ebrahimi }
159*f5c631daSSadaf Ebrahimi 
Reset()160*f5c631daSSadaf Ebrahimi void CodeBuffer::Reset() {
161*f5c631daSSadaf Ebrahimi #ifdef VIXL_DEBUG
162*f5c631daSSadaf Ebrahimi   if (managed_) {
163*f5c631daSSadaf Ebrahimi     // Fill with zeros (there is no useful value common to A32 and T32).
164*f5c631daSSadaf Ebrahimi     memset(buffer_, 0, capacity_);
165*f5c631daSSadaf Ebrahimi   }
166*f5c631daSSadaf Ebrahimi #endif
167*f5c631daSSadaf Ebrahimi   cursor_ = buffer_;
168*f5c631daSSadaf Ebrahimi   SetClean();
169*f5c631daSSadaf Ebrahimi }
170*f5c631daSSadaf Ebrahimi 
171*f5c631daSSadaf Ebrahimi 
// Enlarge the backing store to `new_capacity` bytes, preserving the emitted
// contents and the cursor position. Only valid for managed buffers, and only
// for strict growth (new_capacity > capacity_).
void CodeBuffer::Grow(size_t new_capacity) {
  VIXL_ASSERT(managed_);
  VIXL_ASSERT(new_capacity > capacity_);
  // The underlying allocation may move, so capture the cursor as an offset
  // and re-derive the pointer after reallocation.
  ptrdiff_t cursor_offset = GetCursorOffset();
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
  VIXL_CHECK(buffer_ != NULL);
#elif defined(VIXL_CODE_BUFFER_MMAP)
#ifdef __APPLE__
  // TODO: Avoid using VIXL_CODE_BUFFER_MMAP.
  // Don't use false to avoid having the compiler realize it's a noreturn
  // method.
  // NOTE(review): there is no mremap on macOS, so growing an mmap-backed
  // buffer is unsupported there; this assertion always fails because
  // managed_ was asserted true above.
  VIXL_ASSERT(!managed_);
#else
  buffer_ = static_cast<byte*>(
      mremap(buffer_, capacity_, new_capacity, MREMAP_MAYMOVE));
  VIXL_CHECK(buffer_ != MAP_FAILED);
#endif
#else
#error Unknown code buffer allocator.
#endif

  cursor_ = buffer_ + cursor_offset;
  capacity_ = new_capacity;
}


}  // namespace vixl