/*
 * Copyright 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "base/gc_visited_arena_pool.h"

#include <sys/mman.h>
#include <sys/types.h>
#include <unistd.h>

#include "base/arena_allocator-inl.h"
#include "base/memfd.h"
#include "base/utils.h"
#include "gc/collector/mark_compact-inl.h"

namespace art HIDDEN {

TrackedArena::TrackedArena(uint8_t* start, size_t size, bool pre_zygote_fork, bool single_obj_arena)
    : Arena(),
      first_obj_array_(nullptr),
      pre_zygote_fork_(pre_zygote_fork),
      waiting_for_deletion_(false) {
  static_assert(ArenaAllocator::kArenaAlignment <= kMinPageSize,
                "Arena should not need stronger alignment than kMinPageSize.");
  memory_ = start;
  size_ = size;
  if (single_obj_arena) {
    // We have only one object in this arena and it is expected to consume the
    // entire arena.
    bytes_allocated_ = size;
  } else {
    DCHECK_ALIGNED_PARAM(size, gPageSize);
    DCHECK_ALIGNED_PARAM(start, gPageSize);
    size_t arr_size = DivideByPageSize(size);
    first_obj_array_.reset(new uint8_t*[arr_size]);
    std::fill_n(first_obj_array_.get(), arr_size, nullptr);
  }
}

void TrackedArena::Release() {
  if (bytes_allocated_ > 0) {
    ZeroAndReleaseMemory(Begin(), Size());
    if (first_obj_array_.get() != nullptr) {
      std::fill_n(first_obj_array_.get(), DivideByPageSize(Size()), nullptr);
    }
    bytes_allocated_ = 0;
  }
}

void TrackedArena::SetFirstObject(uint8_t* obj_begin, uint8_t* obj_end) {
  DCHECK(first_obj_array_.get() != nullptr);
  DCHECK_LE(static_cast<void*>(Begin()), static_cast<void*>(obj_end));
  DCHECK_LT(static_cast<void*>(obj_begin), static_cast<void*>(obj_end));
  GcVisitedArenaPool* arena_pool =
      static_cast<GcVisitedArenaPool*>(Runtime::Current()->GetLinearAllocArenaPool());
  size_t idx = DivideByPageSize(static_cast<size_t>(obj_begin - Begin()));
  size_t last_byte_idx = DivideByPageSize(static_cast<size_t>(obj_end - 1 - Begin()));
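  // Illustrative example: with gPageSize == 4096 and an object spanning
  // [Begin() + 5000, Begin() + 9000), idx == 1 and last_byte_idx == 2, so the
  // loop below records obj_begin as the first object of page 2 only; page 1's
  // first object started earlier, and page 1 would be updated here only if
  // obj_begin were exactly page-aligned.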
  // Do the update below with the arena-pool's lock held in shared mode to
  // serialize with the compaction pause, wherein we acquire it exclusively.
  // This ensures that the last-byte read there doesn't change between being
  // read and userfaultfd registration.
  ReaderMutexLock rmu(Thread::Current(), arena_pool->GetLock());
  // If the addr is at the beginning of a page, then we set it for that page too.
  if (IsAlignedParam(obj_begin, gPageSize)) {
    first_obj_array_[idx] = obj_begin;
  }
  while (idx < last_byte_idx) {
    first_obj_array_[++idx] = obj_begin;
  }
}

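// Maps a new anonymous region of at least min_size bytes (normally the full
// linear-alloc pool size), registers it with the userfaultfd mark-compact
// collector when that GC is in use, and publishes the whole region as a single
// free chunk. Returns the start address of the new mapping.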
uint8_t* GcVisitedArenaPool::AddMap(size_t min_size) {
  size_t size = std::max(min_size, kLinearAllocPoolSize);
#if defined(__LP64__)
  // This is true only when we are running a 64-bit dex2oat to compile a 32-bit image.
  if (low_4gb_) {
    size = std::max(min_size, kLow4GBLinearAllocPoolSize);
  }
#endif
  size_t alignment = gc::Heap::BestPageTableAlignment(size);
  DCHECK_GE(size, gc::Heap::GetPMDSize());
  std::string err_msg;
  maps_.emplace_back(MemMap::MapAnonymousAligned(
      name_, size, PROT_READ | PROT_WRITE, low_4gb_, alignment, &err_msg));
  MemMap& map = maps_.back();
  if (!map.IsValid()) {
    LOG(FATAL) << "Failed to allocate " << name_ << ": " << err_msg;
    UNREACHABLE();
  }

  if (gUseUserfaultfd) {
    // Create a shadow-map for the map being added for userfaultfd GC.
    gc::collector::MarkCompact* mark_compact =
        Runtime::Current()->GetHeap()->MarkCompactCollector();
    DCHECK_NE(mark_compact, nullptr);
    mark_compact->AddLinearAllocSpaceData(map.Begin(), map.Size());
  }
  Chunk* chunk = new Chunk(map.Begin(), map.Size());
  best_fit_allocs_.insert(chunk);
  free_chunks_.insert(chunk);
  return map.Begin();
}

GcVisitedArenaPool::GcVisitedArenaPool(bool low_4gb, bool is_zygote, const char* name)
    : lock_("gc-visited arena-pool", kGenericBottomLock),
      bytes_allocated_(0),
      unused_arenas_(nullptr),
      name_(name),
      defer_arena_freeing_(false),
      low_4gb_(low_4gb),
      pre_zygote_fork_(is_zygote) {}

GcVisitedArenaPool::~GcVisitedArenaPool() {
  for (Chunk* chunk : free_chunks_) {
    delete chunk;
  }
  // Must not delete chunks from best_fit_allocs_ as they are shared with
  // free_chunks_.
}

size_t GcVisitedArenaPool::GetBytesAllocated() const {
  ReaderMutexLock rmu(Thread::Current(), lock_);
  return bytes_allocated_;
}

uint8_t* GcVisitedArenaPool::AddPreZygoteForkMap(size_t size) {
  DCHECK(pre_zygote_fork_);
  std::string pre_fork_name = "Pre-zygote-";
  pre_fork_name += name_;
  std::string err_msg;
  maps_.emplace_back(MemMap::MapAnonymous(
      pre_fork_name.c_str(), size, PROT_READ | PROT_WRITE, low_4gb_, &err_msg));
  MemMap& map = maps_.back();
  if (!map.IsValid()) {
    LOG(FATAL) << "Failed to allocate " << pre_fork_name << ": " << err_msg;
    UNREACHABLE();
  }
  return map.Begin();
}

uint8_t* GcVisitedArenaPool::AllocSingleObjArena(size_t size) {
  WriterMutexLock wmu(Thread::Current(), lock_);
  Arena* arena;
  DCHECK(gUseUserfaultfd);
  // To minimize private dirty pages, all class and intern table allocations are
  // done outside the LinearAlloc range so they are untouched during GC.
  if (pre_zygote_fork_) {
    uint8_t* begin = static_cast<uint8_t*>(malloc(size));
    auto insert_result = allocated_arenas_.insert(
        new TrackedArena(begin, size, /*pre_zygote_fork=*/true, /*single_obj_arena=*/true));
    arena = *insert_result.first;
  } else {
    arena = AllocArena(size, /*need_first_obj_arr=*/true);
  }
  return arena->Begin();
}

void GcVisitedArenaPool::FreeSingleObjArena(uint8_t* addr) {
  Thread* self = Thread::Current();
  size_t size;
  bool zygote_arena;
  {
    TrackedArena temp_arena(addr);
    WriterMutexLock wmu(self, lock_);
    auto iter = allocated_arenas_.find(&temp_arena);
    DCHECK(iter != allocated_arenas_.end());
    TrackedArena* arena = *iter;
    size = arena->Size();
    zygote_arena = arena->IsPreZygoteForkArena();
    DCHECK_EQ(arena->Begin(), addr);
    DCHECK(arena->IsSingleObjectArena());
    allocated_arenas_.erase(iter);
    if (defer_arena_freeing_) {
      arena->SetupForDeferredDeletion(unused_arenas_);
      unused_arenas_ = arena;
    } else {
      delete arena;
    }
  }
  // Refer to the comment in FreeArenaChain() for why the pages are released
  // after deleting the arena.
  if (zygote_arena) {
    free(addr);
  } else {
    ZeroAndReleaseMemory(addr, size);
    WriterMutexLock wmu(self, lock_);
    FreeRangeLocked(addr, size);
  }
}

Arena* GcVisitedArenaPool::AllocArena(size_t size, bool single_obj_arena) {
  // Return only page-aligned sizes so that madvise can be leveraged.
  size = RoundUp(size, gPageSize);
  if (pre_zygote_fork_) {
    // The first fork out of zygote hasn't happened yet. Allocate arena in a
    // private-anonymous mapping to retain clean pages across fork.
    uint8_t* addr = AddPreZygoteForkMap(size);
    auto insert_result = allocated_arenas_.insert(
        new TrackedArena(addr, size, /*pre_zygote_fork=*/true, single_obj_arena));
    DCHECK(insert_result.second);
    return *insert_result.first;
  }

  Chunk temp_chunk(nullptr, size);
  auto best_fit_iter = best_fit_allocs_.lower_bound(&temp_chunk);
  if (UNLIKELY(best_fit_iter == best_fit_allocs_.end())) {
    AddMap(size);
    best_fit_iter = best_fit_allocs_.lower_bound(&temp_chunk);
    CHECK(best_fit_iter != best_fit_allocs_.end());
  }
  auto free_chunks_iter = free_chunks_.find(*best_fit_iter);
  DCHECK(free_chunks_iter != free_chunks_.end());
  Chunk* chunk = *best_fit_iter;
  DCHECK_EQ(chunk, *free_chunks_iter);
  // If the best-fit chunk is smaller than 2x the requested size, then give out
  // the whole chunk.
  if (chunk->size_ < 2 * size) {
    DCHECK_GE(chunk->size_, size);
    auto insert_result = allocated_arenas_.insert(new TrackedArena(chunk->addr_,
                                                                   chunk->size_,
                                                                   /*pre_zygote_fork=*/false,
                                                                   single_obj_arena));
    DCHECK(insert_result.second);
    free_chunks_.erase(free_chunks_iter);
    best_fit_allocs_.erase(best_fit_iter);
    delete chunk;
    return *insert_result.first;
  } else {
    auto insert_result = allocated_arenas_.insert(new TrackedArena(chunk->addr_,
                                                                   size,
                                                                   /*pre_zygote_fork=*/false,
                                                                   single_obj_arena));
    DCHECK(insert_result.second);
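    // Split the chunk: the first 'size' bytes become the new arena and the
    // remainder stays in the free sets below. Illustrative example: carving an
    // 8 KiB (page-rounded) request out of a 64 KiB chunk at addr leaves a
    // 56 KiB free chunk starting at addr + 8 KiB.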
    // Compute next iterators for faster insert later.
    auto next_best_fit_iter = best_fit_iter;
    next_best_fit_iter++;
    auto next_free_chunks_iter = free_chunks_iter;
    next_free_chunks_iter++;
    auto best_fit_nh = best_fit_allocs_.extract(best_fit_iter);
    auto free_chunks_nh = free_chunks_.extract(free_chunks_iter);
    best_fit_nh.value()->addr_ += size;
    best_fit_nh.value()->size_ -= size;
    DCHECK_EQ(free_chunks_nh.value()->addr_, chunk->addr_);
    best_fit_allocs_.insert(next_best_fit_iter, std::move(best_fit_nh));
    free_chunks_.insert(next_free_chunks_iter, std::move(free_chunks_nh));
    return *insert_result.first;
  }
}

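// Returns [range_begin, range_begin + range_size) to the free chunk sets,
// coalescing it with the preceding and/or following free chunk when they are
// contiguous with the range. For example, freeing [A + 4K, A + 8K) while
// [A, A + 4K) and [A + 8K, A + 12K) are free leaves a single chunk
// [A, A + 12K). Expects lock_ to be held exclusively by the caller.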
void GcVisitedArenaPool::FreeRangeLocked(uint8_t* range_begin, size_t range_size) {
  Chunk temp_chunk(range_begin, range_size);
  bool merge_with_next = false;
  bool merge_with_prev = false;
  auto next_iter = free_chunks_.lower_bound(&temp_chunk);
  auto iter_for_extract = free_chunks_.end();
  // Can we merge with the previous chunk?
  if (next_iter != free_chunks_.begin()) {
    auto prev_iter = next_iter;
    prev_iter--;
    merge_with_prev = (*prev_iter)->addr_ + (*prev_iter)->size_ == range_begin;
    if (merge_with_prev) {
      range_begin = (*prev_iter)->addr_;
      range_size += (*prev_iter)->size_;
      // Hold on to the iterator for a faster extract later.
      iter_for_extract = prev_iter;
    }
  }
  // Can we merge with the next chunk?
  if (next_iter != free_chunks_.end()) {
    merge_with_next = range_begin + range_size == (*next_iter)->addr_;
    if (merge_with_next) {
      range_size += (*next_iter)->size_;
      if (merge_with_prev) {
        auto iter = next_iter;
        next_iter++;
        // Keep only one of the two chunks to be expanded.
        Chunk* chunk = *iter;
        size_t erase_res = best_fit_allocs_.erase(chunk);
        DCHECK_EQ(erase_res, 1u);
        free_chunks_.erase(iter);
        delete chunk;
      } else {
        iter_for_extract = next_iter;
        next_iter++;
      }
    }
  }

  // Extract-insert avoids 2 (of 4) node destructions and 2 (of 2) node
  // creations compared to erase-insert, so use it when merging.
  if (merge_with_prev || merge_with_next) {
    auto free_chunks_nh = free_chunks_.extract(iter_for_extract);
    auto best_fit_allocs_nh = best_fit_allocs_.extract(*iter_for_extract);

    free_chunks_nh.value()->addr_ = range_begin;
    DCHECK_EQ(best_fit_allocs_nh.value()->addr_, range_begin);
    free_chunks_nh.value()->size_ = range_size;
    DCHECK_EQ(best_fit_allocs_nh.value()->size_, range_size);

    free_chunks_.insert(next_iter, std::move(free_chunks_nh));
    // Since the chunk's size has expanded, the hint won't be useful
    // for the best-fit set.
    best_fit_allocs_.insert(std::move(best_fit_allocs_nh));
  } else {
    DCHECK(iter_for_extract == free_chunks_.end());
    Chunk* chunk = new Chunk(range_begin, range_size);
    free_chunks_.insert(next_iter, chunk);
    best_fit_allocs_.insert(chunk);
  }
}

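// Frees a chain of arenas in three steps: (1) under lock_, record each arena's
// range, account its allocated bytes, remove it from allocated_arenas_ and
// delete it (or queue it on unused_arenas_ when freeing is deferred); (2) with
// the lock dropped, zero and madvise the non-pre-zygote ranges; (3) under
// lock_ again, unmap pre-zygote maps and return the remaining ranges to the
// free chunk sets.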
void GcVisitedArenaPool::FreeArenaChain(Arena* first) {
  if (kRunningOnMemoryTool) {
    for (Arena* arena = first; arena != nullptr; arena = arena->Next()) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->Begin(), arena->GetBytesAllocated());
    }
  }

  // TODO: Handle the case when arena_allocator::kArenaAllocatorPreciseTracking
  // is true. See MemMapArenaPool::FreeArenaChain() for example.
  CHECK(!arena_allocator::kArenaAllocatorPreciseTracking);
  Thread* self = Thread::Current();
  // Vector of arena ranges to be freed, and whether each is pre-zygote-fork.
  std::vector<std::tuple<uint8_t*, size_t, bool>> free_ranges;

  {
    WriterMutexLock wmu(self, lock_);
    while (first != nullptr) {
      TrackedArena* temp = down_cast<TrackedArena*>(first);
      DCHECK(!temp->IsSingleObjectArena());
      first = first->Next();
      free_ranges.emplace_back(temp->Begin(), temp->Size(), temp->IsPreZygoteForkArena());
      // In other implementations of ArenaPool this is calculated when asked
      // for, thanks to the list of free arenas that is kept around. But in this
      // case, we release the freed arena back to the pool and therefore need to
      // calculate it here.
      bytes_allocated_ += temp->GetBytesAllocated();
      auto iter = allocated_arenas_.find(temp);
      DCHECK(iter != allocated_arenas_.end());
      allocated_arenas_.erase(iter);
      if (defer_arena_freeing_) {
        temp->SetupForDeferredDeletion(unused_arenas_);
        unused_arenas_ = temp;
      } else {
        delete temp;
      }
    }
  }

  // madvise of arenas must be done after the above loop, which serializes with
  // MarkCompact::ProcessLinearAlloc(), so that if it finds an arena that is not
  // yet 'waiting-for-deletion' then it finishes processing that arena before we
  // clear it here. Otherwise, we could have a situation wherein the arena-pool
  // assumes the memory range of the arena(s) to be zeroed (by madvise), whereas
  // the GC maps stale arena pages.
  for (auto& iter : free_ranges) {
    // No need to madvise pre-zygote-fork arenas as they will be munmapped below.
    if (!std::get<2>(iter)) {
      ZeroAndReleaseMemory(std::get<0>(iter), std::get<1>(iter));
    }
  }

  WriterMutexLock wmu(self, lock_);
  for (auto& iter : free_ranges) {
    if (UNLIKELY(std::get<2>(iter))) {
      bool found = false;
      for (auto map_iter = maps_.begin(); map_iter != maps_.end(); map_iter++) {
        if (map_iter->Begin() == std::get<0>(iter)) {
          // erase() will destruct the MemMap and thereby munmap it. But this
          // happens very rarely, so it's ok to do it with the lock acquired.
          maps_.erase(map_iter);
          found = true;
          break;
        }
      }
      CHECK(found);
    } else {
      FreeRangeLocked(std::get<0>(iter), std::get<1>(iter));
    }
  }
}

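// Deletes the arenas that FreeArenaChain() and FreeSingleObjArena() queued on
// unused_arenas_ while defer_arena_freeing_ was set. The list is detached and
// defer_arena_freeing_ cleared under lock_; the deletions themselves happen
// with the lock dropped.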
void GcVisitedArenaPool::DeleteUnusedArenas() {
  TrackedArena* arena;
  {
    WriterMutexLock wmu(Thread::Current(), lock_);
    defer_arena_freeing_ = false;
    arena = unused_arenas_;
    unused_arenas_ = nullptr;
  }
  while (arena != nullptr) {
    TrackedArena* temp = down_cast<TrackedArena*>(arena->Next());
    delete arena;
    arena = temp;
  }
}

}  // namespace art