// xref: /aosp_15_r20/art/runtime/gc/space/region_space.cc (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <deque>

#include "bump_pointer_space-inl.h"
#include "bump_pointer_space.h"
#include "base/dumpable.h"
#include "base/logging.h"
#include "gc/accounting/read_barrier_table.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "thread_list.h"

namespace art HIDDEN {
namespace gc {
namespace space {

// If a region has live objects whose total size is less than this percentage
// of the region size, evacuate the region.
static constexpr uint kEvacuateLivePercentThreshold = 75U;

// Whether we protect the unused and cleared regions.
static constexpr bool kProtectClearedRegions = kIsDebugBuild;

// Whether we poison memory areas occupied by dead objects in unevacuated regions.
static constexpr bool kPoisonDeadObjectsInUnevacuatedRegions = kIsDebugBuild;

// Special 32-bit value used to poison memory areas occupied by dead
// objects in unevacuated regions. Dereferencing this value is expected
// to trigger a memory protection fault, as it is unlikely that it
// points to a valid, non-protected memory area.
static constexpr uint32_t kPoisonDeadObject = 0xBADDB01D;  // "BADDROID"

// Whether we check a region's live bytes count against the region bitmap.
static constexpr bool kCheckLiveBytesAgainstRegionBitmap = kIsDebugBuild;

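// Sketch of the mapping strategy used below, with illustrative numbers only
// (the real region size is kRegionSize, defined in region_space.h): if
// kRegionSize were 256 KiB and capacity were 64 MiB, we would map
// 64 MiB + 256 KiB so that, even when the kernel returns an unaligned base
// address, AlignBy(kRegionSize) can trim both ends and still leave a
// kRegionSize-aligned map of exactly `capacity` bytes, as the
// ReadBarrierTable needs.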
MemMap RegionSpace::CreateMemMap(const std::string& name,
                                 size_t capacity,
                                 uint8_t* requested_begin) {
  CHECK_ALIGNED(capacity, kRegionSize);
  std::string error_msg;
  // Ask for an additional kRegionSize of capacity so that we can align the map to kRegionSize
  // even if we get an unaligned base address. This is necessary for the ReadBarrierTable to work.
  MemMap mem_map;
  while (true) {
    mem_map = MemMap::MapAnonymous(name.c_str(),
                                   requested_begin,
                                   capacity + kRegionSize,
                                   PROT_READ | PROT_WRITE,
                                   /*low_4gb=*/ true,
                                   /*reuse=*/ false,
                                   /*reservation=*/ nullptr,
                                   &error_msg);
    if (mem_map.IsValid() || requested_begin == nullptr) {
      break;
    }
    // Retry with no specified request begin.
    requested_begin = nullptr;
  }
  if (!mem_map.IsValid()) {
    LOG(ERROR) << "Failed to allocate pages for alloc space (" << name << ") of size "
        << PrettySize(capacity) << " with message " << error_msg;
    PrintFileToLog("/proc/self/maps", LogSeverity::ERROR);
    MemMap::DumpMaps(LOG_STREAM(ERROR));
    return MemMap::Invalid();
  }
  CHECK_EQ(mem_map.Size(), capacity + kRegionSize);
  CHECK_EQ(mem_map.Begin(), mem_map.BaseBegin());
  CHECK_EQ(mem_map.Size(), mem_map.BaseSize());
  if (IsAlignedParam(mem_map.Begin(), kRegionSize)) {
    // Got an aligned map. Since we requested a map that's kRegionSize larger, shrink it by
    // kRegionSize at the end.
    mem_map.SetSize(capacity);
  } else {
    // Got an unaligned map. Align both ends.
    mem_map.AlignBy(kRegionSize);
  }
  CHECK_ALIGNED(mem_map.Begin(), kRegionSize);
  CHECK_ALIGNED(mem_map.End(), kRegionSize);
  CHECK_EQ(mem_map.Size(), capacity);
  return mem_map;
}

RegionSpace* RegionSpace::Create(
    const std::string& name, MemMap&& mem_map, bool use_generational_cc) {
  return new RegionSpace(name, std::move(mem_map), use_generational_cc);
}

RegionSpace::RegionSpace(const std::string& name, MemMap&& mem_map, bool use_generational_cc)
    : ContinuousMemMapAllocSpace(name,
                                 std::move(mem_map),
                                 mem_map.Begin(),
                                 mem_map.End(),
                                 mem_map.End(),
                                 kGcRetentionPolicyAlwaysCollect),
      region_lock_("Region lock", kRegionSpaceRegionLock),
      use_generational_cc_(use_generational_cc),
      time_(1U),
      num_regions_(mem_map_.Size() / kRegionSize),
      madvise_time_(0U),
      num_non_free_regions_(0U),
      num_evac_regions_(0U),
      max_peak_num_non_free_regions_(0U),
      non_free_region_index_limit_(0U),
      current_region_(&full_region_),
      evac_region_(nullptr),
      cyclic_alloc_region_index_(0U) {
  CHECK_ALIGNED(mem_map_.Size(), kRegionSize);
  CHECK_ALIGNED(mem_map_.Begin(), kRegionSize);
  DCHECK_GT(num_regions_, 0U);
  regions_.reset(new Region[num_regions_]);
  uint8_t* region_addr = mem_map_.Begin();
  for (size_t i = 0; i < num_regions_; ++i, region_addr += kRegionSize) {
    regions_[i].Init(i, region_addr, region_addr + kRegionSize);
  }
  mark_bitmap_ =
      accounting::ContinuousSpaceBitmap::Create("region space live bitmap", Begin(), Capacity());
  if (kIsDebugBuild) {
    CHECK_EQ(regions_[0].Begin(), Begin());
    for (size_t i = 0; i < num_regions_; ++i) {
      CHECK(regions_[i].IsFree());
      CHECK_EQ(static_cast<size_t>(regions_[i].End() - regions_[i].Begin()), kRegionSize);
      if (i + 1 < num_regions_) {
        CHECK_EQ(regions_[i].End(), regions_[i + 1].Begin());
      }
    }
    CHECK_EQ(regions_[num_regions_ - 1].End(), Limit());
  }
  DCHECK(!full_region_.IsFree());
  DCHECK(full_region_.IsAllocated());
  size_t ignored;
  DCHECK(full_region_.Alloc(kAlignment, &ignored, nullptr, &ignored) == nullptr);
  // Protect the whole region space from the start.
  Protect();
}
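
// Note: full_region_ appears to serve as a sentinel region that always looks
// full. current_region_ starts out pointing at it (and SetFromSpace later
// points evac_region_ at it as well), and the DCHECKs above confirm that
// allocating from it always fails, so the allocation fast path simply falls
// through to the slow path without needing a null check.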

size_t RegionSpace::FromSpaceSize() {
  uint64_t num_regions = 0;
  MutexLock mu(Thread::Current(), region_lock_);
  for (size_t i = 0; i < num_regions_; ++i) {
    Region* r = &regions_[i];
    if (r->IsInFromSpace()) {
      ++num_regions;
    }
  }
  return num_regions * kRegionSize;
}

size_t RegionSpace::UnevacFromSpaceSize() {
  uint64_t num_regions = 0;
  MutexLock mu(Thread::Current(), region_lock_);
  for (size_t i = 0; i < num_regions_; ++i) {
    Region* r = &regions_[i];
    if (r->IsInUnevacFromSpace()) {
      ++num_regions;
    }
  }
  return num_regions * kRegionSize;
}

size_t RegionSpace::ToSpaceSize() {
  uint64_t num_regions = 0;
  MutexLock mu(Thread::Current(), region_lock_);
  for (size_t i = 0; i < num_regions_; ++i) {
    Region* r = &regions_[i];
    if (r->IsInToSpace()) {
      ++num_regions;
    }
  }
  return num_regions * kRegionSize;
}
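
// Note: the three size queries above each take region_lock_ and walk every
// region, so they are O(num_regions_). They appear to be accounting/statistics
// helpers rather than allocation fast-path code, which would explain why a
// simple scan is used instead of cached per-type counters.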

void RegionSpace::Region::SetAsUnevacFromSpace(bool clear_live_bytes) {
  // Live bytes are only preserved (i.e. not cleared) during sticky-bit CC collections.
  DCHECK(GetUseGenerationalCC() || clear_live_bytes);
  DCHECK(!IsFree() && IsInToSpace());
  type_ = RegionType::kRegionTypeUnevacFromSpace;
  if (IsNewlyAllocated()) {
    // A newly allocated region set as unevac from-space must be
    // a large or large tail region.
    DCHECK(IsLarge() || IsLargeTail()) << static_cast<uint>(state_);
    // Always clear the live bytes of a newly allocated (large or
    // large tail) region.
    clear_live_bytes = true;
    // Clear the "newly allocated" status here, as we do not want the
    // GC to see it when encountering (and processing) references in the
    // from-space.
    //
    // Invariant: There should be no newly-allocated region in the
    // from-space (when the from-space exists, which is between the calls
    // to RegionSpace::SetFromSpace and RegionSpace::ClearFromSpace).
    is_newly_allocated_ = false;
  }
  if (clear_live_bytes) {
    // Reset the live bytes, as we have made a non-evacuation
    // decision (possibly based on the percentage of live bytes).
    live_bytes_ = 0;
  }
}

bool RegionSpace::Region::GetUseGenerationalCC() {
  // We are retrieving the info from Heap, instead of the cached version in
  // RegionSpace, because accessing the Heap from a Region object is easier
  // than accessing the RegionSpace.
  return art::Runtime::Current()->GetHeap()->GetUseGenerationalCC();
}

inline bool RegionSpace::Region::ShouldBeEvacuated(EvacMode evac_mode) {
  // Evacuation mode `kEvacModeNewlyAllocated` is only used during sticky-bit CC collections.
  DCHECK(GetUseGenerationalCC() || (evac_mode != kEvacModeNewlyAllocated));
  DCHECK((IsAllocated() || IsLarge()) && IsInToSpace());
  // The region should be evacuated if:
  // - the evacuation is forced (!large && `evac_mode == kEvacModeForceAll`); or
  // - the region was allocated after the start of the previous GC (newly allocated region); or
  // - !large and the live ratio is below threshold (`kEvacuateLivePercentThreshold`).
  if (IsLarge()) {
    // It makes no sense to evacuate in the large case, since the region only contains zero or
    // one object. If the region is completely empty, we'll reclaim it anyhow. If its one object
    // is live, we would just be moving around region-aligned memory.
    return false;
  }
  if (UNLIKELY(evac_mode == kEvacModeForceAll)) {
    return true;
  }
  DCHECK(IsAllocated());
  if (is_newly_allocated_) {
    // Invariant: newly allocated regions have an undefined live bytes count.
    DCHECK_EQ(live_bytes_, static_cast<size_t>(-1));
    // We always evacuate newly-allocated non-large regions as we
    // believe they contain many dead objects (a very simple form of
    // the generational hypothesis, even before the Sticky-Bit CC
    // approach).
    //
    // TODO: Verify that assertion by collecting statistics on the
    // number/proportion of live objects in newly allocated regions
    // in RegionSpace::ClearFromSpace.
    //
    // Note that a side effect of evacuating a newly-allocated
    // non-large region is that the "newly allocated" status will
    // later be removed, as its live objects will be copied to an
    // evacuation region, which won't be marked as "newly
    // allocated" (see RegionSpace::AllocateRegion).
    return true;
  } else if (evac_mode == kEvacModeLivePercentNewlyAllocated) {
    bool is_live_percent_valid = (live_bytes_ != static_cast<size_t>(-1));
    if (is_live_percent_valid) {
      DCHECK(IsInToSpace());
      DCHECK_NE(live_bytes_, static_cast<size_t>(-1));
      DCHECK_LE(live_bytes_, BytesAllocated());
      const size_t bytes_allocated = RoundUp(BytesAllocated(), kRegionSize);
      DCHECK_LE(live_bytes_, bytes_allocated);
      // Side note: live_percent == 0 does not necessarily mean
      // there are no live objects, due to rounding (there may be a
      // few).
      return live_bytes_ * 100U < kEvacuateLivePercentThreshold * bytes_allocated;
    }
  }
  return false;
}
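
// Illustrative reading of the live-percent check above (numbers are made up):
// for a non-large region whose BytesAllocated() rounds up to one kRegionSize
// and whose live_bytes_ is half a region, the comparison is
// 50 * kRegionSize < 75 * kRegionSize, so the region is evacuated; a region
// that is, say, 90% live fails the comparison and is left as unevacuated
// from-space by the caller instead.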

void RegionSpace::ZeroLiveBytesForLargeObject(mirror::Object* obj) {
  // This method is only used when Generational CC collection is enabled.
  DCHECK(use_generational_cc_);

  // This code uses logic similar to that used in RegionSpace::FreeLarge
  // to traverse the regions supporting `obj`.
  // TODO: Refactor.
  DCHECK(IsLargeObject(obj));
  DCHECK_ALIGNED(obj, kRegionSize);
  size_t obj_size = obj->SizeOf<kDefaultVerifyFlags>();
  DCHECK_GT(obj_size, space::RegionSpace::kRegionSize);
  // Size of the memory area allocated for `obj`.
  size_t obj_alloc_size = RoundUp(obj_size, space::RegionSpace::kRegionSize);
  uint8_t* begin_addr = reinterpret_cast<uint8_t*>(obj);
  uint8_t* end_addr = begin_addr + obj_alloc_size;
  DCHECK_ALIGNED(end_addr, kRegionSize);

  // Zero the live bytes of the large region and large tail regions containing the object.
  MutexLock mu(Thread::Current(), region_lock_);
  for (uint8_t* addr = begin_addr; addr < end_addr; addr += kRegionSize) {
    Region* region = RefToRegionLocked(reinterpret_cast<mirror::Object*>(addr));
    if (addr == begin_addr) {
      DCHECK(region->IsLarge());
    } else {
      DCHECK(region->IsLargeTail());
    }
    region->ZeroLiveBytes();
  }
  if (kIsDebugBuild && end_addr < Limit()) {
    // If we aren't at the end of the space, check that the next region is not a large tail.
    Region* following_region = RefToRegionLocked(reinterpret_cast<mirror::Object*>(end_addr));
    DCHECK(!following_region->IsLargeTail());
  }
}
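
// Region layout assumed by the loop above: the object starts at the
// region-aligned beginning of a kRegionStateLarge region, and since obj_size
// is strictly greater than kRegionSize (see the DCHECK_GT above) the
// allocation spans RoundUp(obj_size, kRegionSize) bytes in total, so the
// regions after the first are one or more kRegionStateLargeTail regions; the
// loop zeroes the live-bytes counter of each of these regions in turn.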

// Determine which regions to evacuate and mark them as
// from-space. Mark the rest as unevacuated from-space.
void RegionSpace::SetFromSpace(accounting::ReadBarrierTable* rb_table,
                               EvacMode evac_mode,
                               bool clear_live_bytes) {
  // Live bytes are only preserved (i.e. not cleared) during sticky-bit CC collections.
  DCHECK(use_generational_cc_ || clear_live_bytes);
  ++time_;
  if (kUseTableLookupReadBarrier) {
    DCHECK(rb_table->IsAllCleared());
    rb_table->SetAll();
  }
  MutexLock mu(Thread::Current(), region_lock_);
  // We cannot use the partially utilized TLABs across a GC. Therefore, revoke
  // them during the thread-flip.
  partial_tlabs_.clear();

  // Counter for the number of expected large tail regions following a large region.
  size_t num_expected_large_tails = 0U;
  // Flag to store whether the previously seen large region has been evacuated.
  // This is used to apply the same evacuation policy to related large tail regions.
  bool prev_large_evacuated = false;
  VerifyNonFreeRegionLimit();
  const size_t iter_limit = kUseTableLookupReadBarrier
      ? num_regions_
      : std::min(num_regions_, non_free_region_index_limit_);
  for (size_t i = 0; i < iter_limit; ++i) {
    Region* r = &regions_[i];
    RegionState state = r->State();
    RegionType type = r->Type();
    if (!r->IsFree()) {
      DCHECK(r->IsInToSpace());
      if (LIKELY(num_expected_large_tails == 0U)) {
        DCHECK((state == RegionState::kRegionStateAllocated ||
                state == RegionState::kRegionStateLarge) &&
               type == RegionType::kRegionTypeToSpace);
        bool should_evacuate = r->ShouldBeEvacuated(evac_mode);
        bool is_newly_allocated = r->IsNewlyAllocated();
        if (should_evacuate) {
          r->SetAsFromSpace();
          DCHECK(r->IsInFromSpace());
        } else {
          r->SetAsUnevacFromSpace(clear_live_bytes);
          DCHECK(r->IsInUnevacFromSpace());
        }
        if (UNLIKELY(state == RegionState::kRegionStateLarge &&
                     type == RegionType::kRegionTypeToSpace)) {
          prev_large_evacuated = should_evacuate;
          // In 2-phase full-heap GC, this function is called after marking is
          // done. So it is possible that some newly allocated large object is
          // marked but its live_bytes is still -1. We need to clear the
          // mark-bit, otherwise the live_bytes will not be updated in
          // ConcurrentCopying::ProcessMarkStackRef() and hence will break the
          // logic.
          if (use_generational_cc_ && !should_evacuate && is_newly_allocated) {
            GetMarkBitmap()->Clear(reinterpret_cast<mirror::Object*>(r->Begin()));
          }
          num_expected_large_tails = RoundUp(r->BytesAllocated(), kRegionSize) / kRegionSize - 1;
          DCHECK_GT(num_expected_large_tails, 0U);
        }
      } else {
        DCHECK(state == RegionState::kRegionStateLargeTail &&
               type == RegionType::kRegionTypeToSpace);
        if (prev_large_evacuated) {
          r->SetAsFromSpace();
          DCHECK(r->IsInFromSpace());
        } else {
          r->SetAsUnevacFromSpace(clear_live_bytes);
          DCHECK(r->IsInUnevacFromSpace());
        }
        --num_expected_large_tails;
      }
    } else {
      DCHECK_EQ(num_expected_large_tails, 0U);
      if (kUseTableLookupReadBarrier) {
        // Clear the rb table for to-space regions.
        rb_table->Clear(r->Begin(), r->End());
      }
    }
    // Invariant: There should be no newly-allocated region in the from-space.
    DCHECK(!r->is_newly_allocated_);
  }
  DCHECK_EQ(num_expected_large_tails, 0U);
  current_region_ = &full_region_;
  evac_region_ = &full_region_;
}
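
// Illustrative walk-through of the loop above (hypothetical layout): suppose a
// large object spans regions 5..7, so region 5 is kRegionStateLarge and
// regions 6 and 7 are kRegionStateLargeTail. When i == 5 the evacuation
// decision is recorded in prev_large_evacuated and num_expected_large_tails is
// set to 2; iterations i == 6 and i == 7 then apply the same from-space /
// unevac-from-space choice to the tails while counting num_expected_large_tails
// back down to zero.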

static void ZeroAndProtectRegion(uint8_t* begin, uint8_t* end, bool release_eagerly) {
  ZeroMemory(begin, end - begin, release_eagerly);
  if (kProtectClearedRegions) {
    CheckedCall(mprotect, __FUNCTION__, begin, end - begin, PROT_NONE);
  }
}
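
// Note: kProtectClearedRegions equals kIsDebugBuild (see the constants at the
// top of this file), so the PROT_NONE protection is a debug-build aid: a stray
// access to a cleared region should fault immediately instead of silently
// reading zeroed memory.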

void RegionSpace::ReleaseFreeRegions() {
  MutexLock mu(Thread::Current(), region_lock_);
  for (size_t i = 0u; i < num_regions_; ++i) {
    if (regions_[i].IsFree()) {
      uint8_t* begin = regions_[i].Begin();
      DCHECK_ALIGNED_PARAM(begin, gPageSize);
      DCHECK_ALIGNED_PARAM(regions_[i].End(), gPageSize);
      int res = madvise(begin, regions_[i].End() - begin, MADV_DONTNEED);
      CHECK_NE(res, -1) << "madvise failed";
    }
  }
}
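
// Note: madvise(MADV_DONTNEED) lets the kernel reclaim the physical pages
// backing free regions while keeping the virtual mapping intact, so regions
// can be reused later without remapping; for this anonymous mapping,
// subsequent reads would see zero-filled pages.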

void RegionSpace::ClearFromSpace(/* out */ uint64_t* cleared_bytes,
                                 /* out */ uint64_t* cleared_objects,
                                 const bool clear_bitmap,
                                 const bool release_eagerly) {
  DCHECK(cleared_bytes != nullptr);
  DCHECK(cleared_objects != nullptr);
  *cleared_bytes = 0;
  *cleared_objects = 0;
  size_t new_non_free_region_index_limit = 0;
  // We should avoid calling madvise syscalls while holding region_lock_.
  // Therefore, we split the work of this function into 2 loops. The first
  // loop gathers memory ranges that must be madvised. Then we release the lock
  // and perform madvise on the gathered memory ranges. Finally, we reacquire
  // the lock and loop over the regions to clear the from-space regions and make
  // them available for allocation.
  std::deque<std::pair<uint8_t*, uint8_t*>> madvise_list;
  // Gather memory ranges that need to be madvised.
  {
    MutexLock mu(Thread::Current(), region_lock_);
    // Lambda expression `expand_madvise_range` adds a region to the "clear block".
    //
    // As we iterate over from-space regions, we maintain a "clear block", composed of
    // adjacent to-be-cleared regions and whose bounds are `clear_block_begin` and
    // `clear_block_end`. When processing a new region which is not adjacent to
    // the clear block (discontinuity in cleared regions), the clear block
    // is added to madvise_list and the clear block is reset (to the most recent
    // to-be-cleared region).
    //
    // This is done in order to combine zeroing and releasing pages to reduce how
    // often madvise is called. This helps reduce contention on the mmap semaphore
    // (see b/62194020).
    uint8_t* clear_block_begin = nullptr;
    uint8_t* clear_block_end = nullptr;
    auto expand_madvise_range = [&madvise_list, &clear_block_begin, &clear_block_end] (Region* r) {
      if (clear_block_end != r->Begin()) {
        if (clear_block_begin != nullptr) {
          DCHECK(clear_block_end != nullptr);
          madvise_list.push_back(std::pair(clear_block_begin, clear_block_end));
        }
        clear_block_begin = r->Begin();
      }
      clear_block_end = r->End();
    };
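    // Illustrative behavior of expand_madvise_range (hypothetical indices): if
    // regions 2, 3 and then 7 are passed in, regions 2 and 3 are merged into a
    // single [regions_[2].Begin(), regions_[3].End()) clear block; the gap
    // before region 7 flushes that block to madvise_list and starts a new
    // clear block at region 7.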
    for (size_t i = 0; i < std::min(num_regions_, non_free_region_index_limit_); ++i) {
      Region* r = &regions_[i];
      // The following check goes through objects in the region, so it must be
      // performed before madvising the region; it therefore cannot be moved to
      // the second loop below.
      if (kCheckLiveBytesAgainstRegionBitmap) {
        CheckLiveBytesAgainstRegionBitmap(r);
      }
      if (r->IsInFromSpace()) {
        expand_madvise_range(r);
      } else if (r->IsInUnevacFromSpace()) {
        // We must skip tails of live large objects.
        if (r->LiveBytes() == 0 && !r->IsLargeTail()) {
          // Special case for 0 live bytes: this means all of the objects in the region are
          // dead and we can clear it. This is important for large objects since we must
          // not visit dead ones in RegionSpace::Walk because they may contain dangling
          // references to invalid objects. It is also better to clear these regions now
          // instead of at the end of the next GC to save RAM. If we don't clear the regions
          // here, they will be cleared in the next GC by the normal live percent evacuation logic.
          expand_madvise_range(r);
          // Also release RAM for large tails.
          while (i + 1 < num_regions_ && regions_[i + 1].IsLargeTail()) {
            expand_madvise_range(&regions_[i + 1]);
            i++;
          }
        }
      }
    }
    // There is a small probability that we may reach here with
    // clear_block_{begin, end} = nullptr. If all the regions allocated since
    // the last GC have been for large objects and all of them survive until this
    // GC cycle, then there will be no regions in from-space.
    if (LIKELY(clear_block_begin != nullptr)) {
      DCHECK(clear_block_end != nullptr);
      madvise_list.push_back(std::pair(clear_block_begin, clear_block_end));
    }
  }

  // Madvise the memory ranges.
  uint64_t start_time = NanoTime();
  for (const auto &iter : madvise_list) {
    ZeroAndProtectRegion(iter.first, iter.second, release_eagerly);
  }
  madvise_time_ += NanoTime() - start_time;

  for (const auto &iter : madvise_list) {
    if (clear_bitmap) {
      GetLiveBitmap()->ClearRange(
          reinterpret_cast<mirror::Object*>(iter.first),
          reinterpret_cast<mirror::Object*>(iter.second));
    }
  }
  madvise_list.clear();

  // Iterate over regions again and actually make the from-space regions
  // available for allocation.
  MutexLock mu(Thread::Current(), region_lock_);
  VerifyNonFreeRegionLimit();

  // Update the max of the peak non-free region count before reclaiming evacuated regions.
  max_peak_num_non_free_regions_ = std::max(max_peak_num_non_free_regions_,
                                            num_non_free_regions_);

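  // Second pass (under region_lock_ again): reclaim evacuated from-space
  // regions and fully-dead unevac regions, flip the surviving unevac regions
  // back to to-space, and recompute the non-free region index limit that
  // bounds future scans.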
  for (size_t i = 0; i < std::min(num_regions_, non_free_region_index_limit_); ++i) {
    Region* r = &regions_[i];
    if (r->IsInFromSpace()) {
      DCHECK(!r->IsTlab());
      *cleared_bytes += r->BytesAllocated();
      *cleared_objects += r->ObjectsAllocated();
      --num_non_free_regions_;
      r->Clear(/*zero_and_release_pages=*/false);
    } else if (r->IsInUnevacFromSpace()) {
      if (r->LiveBytes() == 0) {
        DCHECK(!r->IsLargeTail());
        *cleared_bytes += r->BytesAllocated();
        *cleared_objects += r->ObjectsAllocated();
        r->Clear(/*zero_and_release_pages=*/false);
        size_t free_regions = 1;
        // Also release RAM for large tails.
        while (i + free_regions < num_regions_ && regions_[i + free_regions].IsLargeTail()) {
          regions_[i + free_regions].Clear(/*zero_and_release_pages=*/false);
          ++free_regions;
        }
        num_non_free_regions_ -= free_regions;
        // When clear_bitmap is true, this bitmap clearing is taken care of in
        // clear_region().
        if (!clear_bitmap) {
          GetLiveBitmap()->ClearRange(
              reinterpret_cast<mirror::Object*>(r->Begin()),
              reinterpret_cast<mirror::Object*>(r->Begin() + free_regions * kRegionSize));
        }
        continue;
      }
      r->SetUnevacFromSpaceAsToSpace();
      if (r->AllAllocatedBytesAreLive()) {
        // Try to optimize the number of ClearRange calls by checking whether the next regions
        // can also be cleared.
        size_t regions_to_clear_bitmap = 1;
        while (i + regions_to_clear_bitmap < num_regions_) {
          Region* const cur = &regions_[i + regions_to_clear_bitmap];
          if (!cur->AllAllocatedBytesAreLive()) {
            DCHECK(!cur->IsLargeTail());
            break;
          }
          CHECK(cur->IsInUnevacFromSpace());
          cur->SetUnevacFromSpaceAsToSpace();
          ++regions_to_clear_bitmap;
        }

        // Optimization (for full CC only): If the live bytes are *all* live
        // in a region then the live-bit information for these objects is
        // superfluous:
        // - We can determine that these objects are all live by using
        //   Region::AllAllocatedBytesAreLive (which just checks whether
        //   `LiveBytes() == static_cast<size_t>(Top() - Begin())`).
        // - We can visit the objects in this region using
        //   RegionSpace::GetNextObject, i.e. without resorting to the
        //   live bits (see RegionSpace::WalkInternal).
        // Therefore, we can clear the bits for these objects in the
        // (live) region space bitmap (and release the corresponding pages).
        //
        // This optimization is incompatible with Generational CC, because:
        // - minor (young-generation) collections need to know which objects
        //   were marked during the previous GC cycle, meaning all mark bitmaps
        //   (this includes the region space bitmap) need to be preserved
        //   between a (minor or major) collection N and a following minor
        //   collection N+1;
        // - at this stage (in the current GC cycle), we cannot determine
        //   whether the next collection will be a minor or a major one.
        // This means that we need to be conservative and always preserve the
        // region space bitmap when using Generational CC.
        // Note that major collections do not require the previous mark bitmaps
        // to be preserved, and as a matter of fact they do clear the region space
        // bitmap. But they cannot do so before we know the next GC cycle will
        // be a major one, so this operation happens at the beginning of such a
        // major collection, before marking starts.
        if (!use_generational_cc_) {
          GetLiveBitmap()->ClearRange(
              reinterpret_cast<mirror::Object*>(r->Begin()),
              reinterpret_cast<mirror::Object*>(r->Begin()
                                                + regions_to_clear_bitmap * kRegionSize));
        }
        // Skip over extra regions for which we cleared the bitmaps: we shall not clear them,
        // as they are unevac regions that are live.
        // Subtract one for the for-loop.
        i += regions_to_clear_bitmap - 1;
      } else {
        // TODO: Explain why we do not poison dead objects in region
        // `r` when it has an undefined live bytes count (i.e. when
        // `r->LiveBytes() == static_cast<size_t>(-1)`) with
        // Generational CC.
        if (!use_generational_cc_ || (r->LiveBytes() != static_cast<size_t>(-1))) {
          // Only some allocated bytes are live in this unevac region.
          // This should only happen for an allocated non-large region.
          DCHECK(r->IsAllocated()) << r->State();
          if (kPoisonDeadObjectsInUnevacuatedRegions) {
            PoisonDeadObjectsInUnevacuatedRegion(r);
          }
        }
      }
    }
    // Note: r may no longer equal last_checked_region if r->IsInUnevacFromSpace() was true above.
    Region* last_checked_region = &regions_[i];
    if (!last_checked_region->IsFree()) {
      new_non_free_region_index_limit = std::max(new_non_free_region_index_limit,
                                                 last_checked_region->Idx() + 1);
    }
  }
  // Update non_free_region_index_limit_.
  SetNonFreeRegionLimit(new_non_free_region_index_limit);
  evac_region_ = nullptr;
  num_non_free_regions_ += num_evac_regions_;
  num_evac_regions_ = 0;
}
633*795d594fSAndroid Build Coastguard Worker 
CheckLiveBytesAgainstRegionBitmap(Region * r)634*795d594fSAndroid Build Coastguard Worker void RegionSpace::CheckLiveBytesAgainstRegionBitmap(Region* r) {
635*795d594fSAndroid Build Coastguard Worker   if (r->LiveBytes() == static_cast<size_t>(-1)) {
636*795d594fSAndroid Build Coastguard Worker     // Live bytes count is undefined for `r`; nothing to check here.
637*795d594fSAndroid Build Coastguard Worker     return;
638*795d594fSAndroid Build Coastguard Worker   }
639*795d594fSAndroid Build Coastguard Worker 
640*795d594fSAndroid Build Coastguard Worker   // Functor walking the region space bitmap for the range corresponding
641*795d594fSAndroid Build Coastguard Worker   // to region `r` and calculating the sum of live bytes.
642*795d594fSAndroid Build Coastguard Worker   size_t live_bytes_recount = 0u;
643*795d594fSAndroid Build Coastguard Worker   auto recount_live_bytes =
644*795d594fSAndroid Build Coastguard Worker       [&r, &live_bytes_recount](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
645*795d594fSAndroid Build Coastguard Worker     DCHECK_ALIGNED(obj, kAlignment);
646*795d594fSAndroid Build Coastguard Worker     if (r->IsLarge()) {
647*795d594fSAndroid Build Coastguard Worker       // If `r` is a large region, then it contains at most one
648*795d594fSAndroid Build Coastguard Worker       // object, which must start at the beginning of the
649*795d594fSAndroid Build Coastguard Worker       // region. The live byte count in that case is equal to the
650*795d594fSAndroid Build Coastguard Worker       // size of the allocated regions (large region + large tail regions).
651*795d594fSAndroid Build Coastguard Worker       DCHECK_EQ(reinterpret_cast<uint8_t*>(obj), r->Begin());
652*795d594fSAndroid Build Coastguard Worker       DCHECK_EQ(live_bytes_recount, 0u);
653*795d594fSAndroid Build Coastguard Worker       live_bytes_recount = r->Top() - r->Begin();
654*795d594fSAndroid Build Coastguard Worker     } else {
655*795d594fSAndroid Build Coastguard Worker       DCHECK(r->IsAllocated())
656*795d594fSAndroid Build Coastguard Worker           << "r->State()=" << r->State() << " r->LiveBytes()=" << r->LiveBytes();
657*795d594fSAndroid Build Coastguard Worker       size_t obj_size = obj->SizeOf<kDefaultVerifyFlags>();
658*795d594fSAndroid Build Coastguard Worker       size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment);
659*795d594fSAndroid Build Coastguard Worker       live_bytes_recount += alloc_size;
660*795d594fSAndroid Build Coastguard Worker     }
661*795d594fSAndroid Build Coastguard Worker   };
662*795d594fSAndroid Build Coastguard Worker   // Visit live objects in `r` and recount the live bytes.
663*795d594fSAndroid Build Coastguard Worker   GetLiveBitmap()->VisitMarkedRange(reinterpret_cast<uintptr_t>(r->Begin()),
664*795d594fSAndroid Build Coastguard Worker                                     reinterpret_cast<uintptr_t>(r->Top()),
665*795d594fSAndroid Build Coastguard Worker                                     recount_live_bytes);
666*795d594fSAndroid Build Coastguard Worker   // Check that this recount matches the region's current live bytes count.
667*795d594fSAndroid Build Coastguard Worker   DCHECK_EQ(live_bytes_recount, r->LiveBytes());
668*795d594fSAndroid Build Coastguard Worker }
669*795d594fSAndroid Build Coastguard Worker 
670*795d594fSAndroid Build Coastguard Worker // Poison the memory area in range [`begin`, `end`) with value `kPoisonDeadObject`.
671*795d594fSAndroid Build Coastguard Worker static void PoisonUnevacuatedRange(uint8_t* begin, uint8_t* end) {
672*795d594fSAndroid Build Coastguard Worker   static constexpr size_t kPoisonDeadObjectSize = sizeof(kPoisonDeadObject);
673*795d594fSAndroid Build Coastguard Worker   static_assert(IsPowerOfTwo(kPoisonDeadObjectSize) &&
674*795d594fSAndroid Build Coastguard Worker                 IsPowerOfTwo(RegionSpace::kAlignment) &&
675*795d594fSAndroid Build Coastguard Worker                 (kPoisonDeadObjectSize < RegionSpace::kAlignment),
676*795d594fSAndroid Build Coastguard Worker                 "RegionSpace::kAlignment should be a multiple of kPoisonDeadObjectSize"
677*795d594fSAndroid Build Coastguard Worker                 " and both should be powers of 2");
678*795d594fSAndroid Build Coastguard Worker   DCHECK_ALIGNED(begin, kPoisonDeadObjectSize);
679*795d594fSAndroid Build Coastguard Worker   DCHECK_ALIGNED(end, kPoisonDeadObjectSize);
680*795d594fSAndroid Build Coastguard Worker   uint32_t* begin_addr = reinterpret_cast<uint32_t*>(begin);
681*795d594fSAndroid Build Coastguard Worker   uint32_t* end_addr = reinterpret_cast<uint32_t*>(end);
682*795d594fSAndroid Build Coastguard Worker   std::fill(begin_addr, end_addr, kPoisonDeadObject);
683*795d594fSAndroid Build Coastguard Worker }
684*795d594fSAndroid Build Coastguard Worker 
685*795d594fSAndroid Build Coastguard Worker void RegionSpace::PoisonDeadObjectsInUnevacuatedRegion(Region* r) {
686*795d594fSAndroid Build Coastguard Worker   // The live byte count of `r` should be different from -1, as this
687*795d594fSAndroid Build Coastguard Worker   // region should neither be a newly allocated region nor an
688*795d594fSAndroid Build Coastguard Worker   // evacuated region.
689*795d594fSAndroid Build Coastguard Worker   DCHECK_NE(r->LiveBytes(), static_cast<size_t>(-1))
690*795d594fSAndroid Build Coastguard Worker       << "Unexpected live bytes count of -1 in " << Dumpable<Region>(*r);
691*795d594fSAndroid Build Coastguard Worker 
692*795d594fSAndroid Build Coastguard Worker   // Past-the-end address of the previously visited (live) object (or
693*795d594fSAndroid Build Coastguard Worker   // the beginning of the region, if `maybe_poison` has not run yet).
694*795d594fSAndroid Build Coastguard Worker   uint8_t* prev_obj_end = reinterpret_cast<uint8_t*>(r->Begin());
695*795d594fSAndroid Build Coastguard Worker 
696*795d594fSAndroid Build Coastguard Worker   // Functor poisoning the space between `obj` and the previously
697*795d594fSAndroid Build Coastguard Worker   // visited (live) object (or the beginning of the region), if any.
698*795d594fSAndroid Build Coastguard Worker   auto maybe_poison = [&prev_obj_end](mirror::Object* obj) REQUIRES(Locks::mutator_lock_) {
699*795d594fSAndroid Build Coastguard Worker     DCHECK_ALIGNED(obj, kAlignment);
700*795d594fSAndroid Build Coastguard Worker     uint8_t* cur_obj_begin = reinterpret_cast<uint8_t*>(obj);
701*795d594fSAndroid Build Coastguard Worker     if (cur_obj_begin != prev_obj_end) {
702*795d594fSAndroid Build Coastguard Worker       // There is a gap (dead object(s)) between the previously
703*795d594fSAndroid Build Coastguard Worker       // visited (live) object (or the beginning of the region) and
704*795d594fSAndroid Build Coastguard Worker       // `obj`; poison that space.
705*795d594fSAndroid Build Coastguard Worker       PoisonUnevacuatedRange(prev_obj_end, cur_obj_begin);
706*795d594fSAndroid Build Coastguard Worker     }
707*795d594fSAndroid Build Coastguard Worker     prev_obj_end = reinterpret_cast<uint8_t*>(GetNextObject(obj));
708*795d594fSAndroid Build Coastguard Worker   };
709*795d594fSAndroid Build Coastguard Worker 
710*795d594fSAndroid Build Coastguard Worker   // Visit live objects in `r` and poison gaps (dead objects) between them.
711*795d594fSAndroid Build Coastguard Worker   GetLiveBitmap()->VisitMarkedRange(reinterpret_cast<uintptr_t>(r->Begin()),
712*795d594fSAndroid Build Coastguard Worker                                     reinterpret_cast<uintptr_t>(r->Top()),
713*795d594fSAndroid Build Coastguard Worker                                     maybe_poison);
714*795d594fSAndroid Build Coastguard Worker   // Poison memory between the last live object and the end of the region, if any.
715*795d594fSAndroid Build Coastguard Worker   if (prev_obj_end < r->Top()) {
716*795d594fSAndroid Build Coastguard Worker     PoisonUnevacuatedRange(prev_obj_end, r->Top());
717*795d594fSAndroid Build Coastguard Worker   }
718*795d594fSAndroid Build Coastguard Worker }
719*795d594fSAndroid Build Coastguard Worker 
720*795d594fSAndroid Build Coastguard Worker bool RegionSpace::LogFragmentationAllocFailure(std::ostream& os,
721*795d594fSAndroid Build Coastguard Worker                                                size_t failed_alloc_bytes) {
722*795d594fSAndroid Build Coastguard Worker   size_t max_contiguous_allocation = 0;
723*795d594fSAndroid Build Coastguard Worker   MutexLock mu(Thread::Current(), region_lock_);
724*795d594fSAndroid Build Coastguard Worker 
725*795d594fSAndroid Build Coastguard Worker   if (current_region_->End() - current_region_->Top() > 0) {
726*795d594fSAndroid Build Coastguard Worker     max_contiguous_allocation = current_region_->End() - current_region_->Top();
727*795d594fSAndroid Build Coastguard Worker   }
728*795d594fSAndroid Build Coastguard Worker 
729*795d594fSAndroid Build Coastguard Worker   size_t max_contiguous_free_regions = 0;
730*795d594fSAndroid Build Coastguard Worker   size_t num_contiguous_free_regions = 0;
731*795d594fSAndroid Build Coastguard Worker   bool prev_free_region = false;
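  // Scan the region table for the longest run of consecutive free regions; a
  // run of N free regions could back a contiguous allocation of
  // N * kRegionSize bytes.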
732*795d594fSAndroid Build Coastguard Worker   for (size_t i = 0; i < num_regions_; ++i) {
733*795d594fSAndroid Build Coastguard Worker     Region* r = &regions_[i];
734*795d594fSAndroid Build Coastguard Worker     if (r->IsFree()) {
735*795d594fSAndroid Build Coastguard Worker       if (!prev_free_region) {
736*795d594fSAndroid Build Coastguard Worker         CHECK_EQ(num_contiguous_free_regions, 0U);
737*795d594fSAndroid Build Coastguard Worker         prev_free_region = true;
738*795d594fSAndroid Build Coastguard Worker       }
739*795d594fSAndroid Build Coastguard Worker       ++num_contiguous_free_regions;
740*795d594fSAndroid Build Coastguard Worker     } else if (prev_free_region) {
741*795d594fSAndroid Build Coastguard Worker       CHECK_NE(num_contiguous_free_regions, 0U);
742*795d594fSAndroid Build Coastguard Worker       max_contiguous_free_regions = std::max(max_contiguous_free_regions,
743*795d594fSAndroid Build Coastguard Worker                                              num_contiguous_free_regions);
744*795d594fSAndroid Build Coastguard Worker       num_contiguous_free_regions = 0U;
745*795d594fSAndroid Build Coastguard Worker       prev_free_region = false;
746*795d594fSAndroid Build Coastguard Worker     }
747*795d594fSAndroid Build Coastguard Worker   }
748*795d594fSAndroid Build Coastguard Worker   max_contiguous_allocation = std::max(max_contiguous_allocation,
749*795d594fSAndroid Build Coastguard Worker                                        max_contiguous_free_regions * kRegionSize);
750*795d594fSAndroid Build Coastguard Worker 
751*795d594fSAndroid Build Coastguard Worker   // Calculate how many regions are available for allocations as we have to ensure
752*795d594fSAndroid Build Coastguard Worker   // that enough regions are left for evacuation.
753*795d594fSAndroid Build Coastguard Worker   size_t regions_free_for_alloc = num_regions_ / 2 - num_non_free_regions_;
754*795d594fSAndroid Build Coastguard Worker 
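  // Worked example (hypothetical numbers): with num_regions_ = 256 and
  // num_non_free_regions_ = 100, regions_free_for_alloc = 256 / 2 - 100 = 28,
  // so the largest contiguous allocation reported below is capped at
  // 28 * kRegionSize.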
755*795d594fSAndroid Build Coastguard Worker   max_contiguous_allocation = std::min(max_contiguous_allocation,
756*795d594fSAndroid Build Coastguard Worker                                        regions_free_for_alloc * kRegionSize);
757*795d594fSAndroid Build Coastguard Worker   if (failed_alloc_bytes > max_contiguous_allocation) {
758*795d594fSAndroid Build Coastguard Worker     // Region space does not normally fragment in the conventional sense. However we can run out
759*795d594fSAndroid Build Coastguard Worker     // of region space prematurely if we have many threads, each with a partially committed TLAB.
760*795d594fSAndroid Build Coastguard Worker     // The whole TLAB uses up region address space, but we only count the section that was
761*795d594fSAndroid Build Coastguard Worker     // actually given to the thread so far as allocated. For unlikely allocation request sequences
762*795d594fSAndroid Build Coastguard Worker     // involving largish objects that don't qualify for large objects space, we may also be unable
763*795d594fSAndroid Build Coastguard Worker     // involving largish objects that don't qualify for the large object space, we may also be
764*795d594fSAndroid Build Coastguard Worker     // here. This appears less likely, since we usually reuse sufficiently large TLAB "tails"
765*795d594fSAndroid Build Coastguard Worker     // that are no longer needed.
766*795d594fSAndroid Build Coastguard Worker     os << "; failed due to fragmentation (largest possible contiguous allocation "
767*795d594fSAndroid Build Coastguard Worker        << max_contiguous_allocation << " bytes). Number of " << PrettySize(kRegionSize)
768*795d594fSAndroid Build Coastguard Worker        << " sized free regions is: " << regions_free_for_alloc
769*795d594fSAndroid Build Coastguard Worker        << ". Likely cause: (1) Too much memory in use, and "
770*795d594fSAndroid Build Coastguard Worker        << "(2) many threads or many larger objects of the wrong kind";
771*795d594fSAndroid Build Coastguard Worker     return true;
772*795d594fSAndroid Build Coastguard Worker   }
773*795d594fSAndroid Build Coastguard Worker   // Caller's job to print failed_alloc_bytes.
774*795d594fSAndroid Build Coastguard Worker   return false;
775*795d594fSAndroid Build Coastguard Worker }
776*795d594fSAndroid Build Coastguard Worker 
777*795d594fSAndroid Build Coastguard Worker void RegionSpace::Clear() {
778*795d594fSAndroid Build Coastguard Worker   MutexLock mu(Thread::Current(), region_lock_);
779*795d594fSAndroid Build Coastguard Worker   for (size_t i = 0; i < num_regions_; ++i) {
780*795d594fSAndroid Build Coastguard Worker     Region* r = &regions_[i];
781*795d594fSAndroid Build Coastguard Worker     if (!r->IsFree()) {
782*795d594fSAndroid Build Coastguard Worker       --num_non_free_regions_;
783*795d594fSAndroid Build Coastguard Worker     }
784*795d594fSAndroid Build Coastguard Worker     r->Clear(/*zero_and_release_pages=*/true);
785*795d594fSAndroid Build Coastguard Worker   }
786*795d594fSAndroid Build Coastguard Worker   SetNonFreeRegionLimit(0);
787*795d594fSAndroid Build Coastguard Worker   DCHECK_EQ(num_non_free_regions_, 0u);
788*795d594fSAndroid Build Coastguard Worker   current_region_ = &full_region_;
789*795d594fSAndroid Build Coastguard Worker   evac_region_ = &full_region_;
790*795d594fSAndroid Build Coastguard Worker }
791*795d594fSAndroid Build Coastguard Worker 
792*795d594fSAndroid Build Coastguard Worker void RegionSpace::Protect() {
793*795d594fSAndroid Build Coastguard Worker   if (kProtectClearedRegions) {
794*795d594fSAndroid Build Coastguard Worker     CheckedCall(mprotect, __FUNCTION__, Begin(), Size(), PROT_NONE);
795*795d594fSAndroid Build Coastguard Worker   }
796*795d594fSAndroid Build Coastguard Worker }
797*795d594fSAndroid Build Coastguard Worker 
798*795d594fSAndroid Build Coastguard Worker void RegionSpace::Unprotect() {
799*795d594fSAndroid Build Coastguard Worker   if (kProtectClearedRegions) {
800*795d594fSAndroid Build Coastguard Worker     CheckedCall(mprotect, __FUNCTION__, Begin(), Size(), PROT_READ | PROT_WRITE);
801*795d594fSAndroid Build Coastguard Worker   }
802*795d594fSAndroid Build Coastguard Worker }
803*795d594fSAndroid Build Coastguard Worker 
804*795d594fSAndroid Build Coastguard Worker void RegionSpace::ClampGrowthLimit(size_t new_capacity) {
805*795d594fSAndroid Build Coastguard Worker   MutexLock mu(Thread::Current(), region_lock_);
806*795d594fSAndroid Build Coastguard Worker   CHECK_LE(new_capacity, NonGrowthLimitCapacity());
807*795d594fSAndroid Build Coastguard Worker   size_t new_num_regions = new_capacity / kRegionSize;
808*795d594fSAndroid Build Coastguard Worker   if (non_free_region_index_limit_ > new_num_regions) {
809*795d594fSAndroid Build Coastguard Worker     LOG(WARNING) << "Couldn't clamp region space as there are regions in use beyond growth limit.";
810*795d594fSAndroid Build Coastguard Worker     return;
811*795d594fSAndroid Build Coastguard Worker   }
812*795d594fSAndroid Build Coastguard Worker   num_regions_ = new_num_regions;
813*795d594fSAndroid Build Coastguard Worker   if (kCyclicRegionAllocation && cyclic_alloc_region_index_ >= num_regions_) {
814*795d594fSAndroid Build Coastguard Worker     cyclic_alloc_region_index_ = 0u;
815*795d594fSAndroid Build Coastguard Worker   }
816*795d594fSAndroid Build Coastguard Worker   SetLimit(Begin() + new_capacity);
817*795d594fSAndroid Build Coastguard Worker   if (Size() > new_capacity) {
818*795d594fSAndroid Build Coastguard Worker     SetEnd(Limit());
819*795d594fSAndroid Build Coastguard Worker   }
820*795d594fSAndroid Build Coastguard Worker   GetMarkBitmap()->SetHeapSize(new_capacity);
821*795d594fSAndroid Build Coastguard Worker   GetMemMap()->SetSize(new_capacity);
822*795d594fSAndroid Build Coastguard Worker }
823*795d594fSAndroid Build Coastguard Worker 
824*795d594fSAndroid Build Coastguard Worker void RegionSpace::Dump(std::ostream& os) const {
825*795d594fSAndroid Build Coastguard Worker   os << GetName() << " "
826*795d594fSAndroid Build Coastguard Worker      << reinterpret_cast<void*>(Begin()) << "-" << reinterpret_cast<void*>(Limit());
827*795d594fSAndroid Build Coastguard Worker }
828*795d594fSAndroid Build Coastguard Worker 
829*795d594fSAndroid Build Coastguard Worker void RegionSpace::DumpRegionForObject(std::ostream& os, mirror::Object* obj) {
830*795d594fSAndroid Build Coastguard Worker   CHECK(HasAddress(obj));
831*795d594fSAndroid Build Coastguard Worker   MutexLock mu(Thread::Current(), region_lock_);
832*795d594fSAndroid Build Coastguard Worker   RefToRegionUnlocked(obj)->Dump(os);
833*795d594fSAndroid Build Coastguard Worker }
834*795d594fSAndroid Build Coastguard Worker 
835*795d594fSAndroid Build Coastguard Worker void RegionSpace::DumpRegions(std::ostream& os) {
836*795d594fSAndroid Build Coastguard Worker   MutexLock mu(Thread::Current(), region_lock_);
837*795d594fSAndroid Build Coastguard Worker   for (size_t i = 0; i < num_regions_; ++i) {
838*795d594fSAndroid Build Coastguard Worker     regions_[i].Dump(os);
839*795d594fSAndroid Build Coastguard Worker   }
840*795d594fSAndroid Build Coastguard Worker }
841*795d594fSAndroid Build Coastguard Worker 
842*795d594fSAndroid Build Coastguard Worker void RegionSpace::DumpNonFreeRegions(std::ostream& os) {
843*795d594fSAndroid Build Coastguard Worker   MutexLock mu(Thread::Current(), region_lock_);
844*795d594fSAndroid Build Coastguard Worker   for (size_t i = 0; i < num_regions_; ++i) {
845*795d594fSAndroid Build Coastguard Worker     Region* reg = &regions_[i];
846*795d594fSAndroid Build Coastguard Worker     if (!reg->IsFree()) {
847*795d594fSAndroid Build Coastguard Worker       reg->Dump(os);
848*795d594fSAndroid Build Coastguard Worker     }
849*795d594fSAndroid Build Coastguard Worker   }
850*795d594fSAndroid Build Coastguard Worker }
851*795d594fSAndroid Build Coastguard Worker 
852*795d594fSAndroid Build Coastguard Worker void RegionSpace::RecordAlloc(mirror::Object* ref) {
853*795d594fSAndroid Build Coastguard Worker   CHECK(ref != nullptr);
854*795d594fSAndroid Build Coastguard Worker   Region* r = RefToRegion(ref);
855*795d594fSAndroid Build Coastguard Worker   r->objects_allocated_.fetch_add(1, std::memory_order_relaxed);
856*795d594fSAndroid Build Coastguard Worker }
857*795d594fSAndroid Build Coastguard Worker 
858*795d594fSAndroid Build Coastguard Worker bool RegionSpace::AllocNewTlab(Thread* self,
859*795d594fSAndroid Build Coastguard Worker                                const size_t tlab_size,
860*795d594fSAndroid Build Coastguard Worker                                size_t* bytes_tl_bulk_allocated) {
861*795d594fSAndroid Build Coastguard Worker   MutexLock mu(self, region_lock_);
862*795d594fSAndroid Build Coastguard Worker   RevokeThreadLocalBuffersLocked(self, /*reuse=*/ gc::Heap::kUsePartialTlabs);
863*795d594fSAndroid Build Coastguard Worker   Region* r = nullptr;
864*795d594fSAndroid Build Coastguard Worker   uint8_t* pos = nullptr;
865*795d594fSAndroid Build Coastguard Worker   *bytes_tl_bulk_allocated = tlab_size;
866*795d594fSAndroid Build Coastguard Worker   // First attempt to get a partially used TLAB, if available.
867*795d594fSAndroid Build Coastguard Worker   if (tlab_size < kRegionSize) {
868*795d594fSAndroid Build Coastguard Worker     // Fetch the largest partial TLAB. The multimap is ordered in decreasing
869*795d594fSAndroid Build Coastguard Worker     // size.
870*795d594fSAndroid Build Coastguard Worker     auto largest_partial_tlab = partial_tlabs_.begin();
871*795d594fSAndroid Build Coastguard Worker     if (largest_partial_tlab != partial_tlabs_.end() && largest_partial_tlab->first >= tlab_size) {
872*795d594fSAndroid Build Coastguard Worker       r = largest_partial_tlab->second;
873*795d594fSAndroid Build Coastguard Worker       pos = r->End() - largest_partial_tlab->first;
874*795d594fSAndroid Build Coastguard Worker       partial_tlabs_.erase(largest_partial_tlab);
875*795d594fSAndroid Build Coastguard Worker       DCHECK_GT(r->End(), pos);
876*795d594fSAndroid Build Coastguard Worker       DCHECK_LE(r->Begin(), pos);
877*795d594fSAndroid Build Coastguard Worker       DCHECK_GE(r->Top(), pos);
878*795d594fSAndroid Build Coastguard Worker       *bytes_tl_bulk_allocated -= r->Top() - pos;
879*795d594fSAndroid Build Coastguard Worker     }
880*795d594fSAndroid Build Coastguard Worker   }
881*795d594fSAndroid Build Coastguard Worker   if (r == nullptr) {
882*795d594fSAndroid Build Coastguard Worker     // Fallback to allocating an entire region as TLAB.
883*795d594fSAndroid Build Coastguard Worker     r = AllocateRegion(/*for_evac=*/ false);
884*795d594fSAndroid Build Coastguard Worker   }
885*795d594fSAndroid Build Coastguard Worker   if (r != nullptr) {
886*795d594fSAndroid Build Coastguard Worker     uint8_t* start = pos != nullptr ? pos : r->Begin();
887*795d594fSAndroid Build Coastguard Worker     DCHECK_ALIGNED(start, kObjectAlignment);
888*795d594fSAndroid Build Coastguard Worker     r->is_a_tlab_ = true;
889*795d594fSAndroid Build Coastguard Worker     r->thread_ = self;
890*795d594fSAndroid Build Coastguard Worker     r->SetTop(r->End());
891*795d594fSAndroid Build Coastguard Worker     self->SetTlab(start, start + tlab_size, r->End());
892*795d594fSAndroid Build Coastguard Worker     return true;
893*795d594fSAndroid Build Coastguard Worker   }
894*795d594fSAndroid Build Coastguard Worker   return false;
895*795d594fSAndroid Build Coastguard Worker }
896*795d594fSAndroid Build Coastguard Worker 
897*795d594fSAndroid Build Coastguard Worker size_t RegionSpace::RevokeThreadLocalBuffers(Thread* thread) {
898*795d594fSAndroid Build Coastguard Worker   MutexLock mu(Thread::Current(), region_lock_);
899*795d594fSAndroid Build Coastguard Worker   RevokeThreadLocalBuffersLocked(thread, /*reuse=*/ gc::Heap::kUsePartialTlabs);
900*795d594fSAndroid Build Coastguard Worker   return 0U;
901*795d594fSAndroid Build Coastguard Worker }
902*795d594fSAndroid Build Coastguard Worker 
903*795d594fSAndroid Build Coastguard Worker size_t RegionSpace::RevokeThreadLocalBuffers(Thread* thread, const bool reuse) {
904*795d594fSAndroid Build Coastguard Worker   MutexLock mu(Thread::Current(), region_lock_);
905*795d594fSAndroid Build Coastguard Worker   RevokeThreadLocalBuffersLocked(thread, reuse);
906*795d594fSAndroid Build Coastguard Worker   return 0U;
907*795d594fSAndroid Build Coastguard Worker }
908*795d594fSAndroid Build Coastguard Worker 
909*795d594fSAndroid Build Coastguard Worker void RegionSpace::RevokeThreadLocalBuffersLocked(Thread* thread, bool reuse) {
910*795d594fSAndroid Build Coastguard Worker   uint8_t* tlab_start = thread->GetTlabStart();
911*795d594fSAndroid Build Coastguard Worker   DCHECK_EQ(thread->HasTlab(), tlab_start != nullptr);
912*795d594fSAndroid Build Coastguard Worker   if (tlab_start != nullptr) {
913*795d594fSAndroid Build Coastguard Worker     Region* r = RefToRegionLocked(reinterpret_cast<mirror::Object*>(tlab_start));
914*795d594fSAndroid Build Coastguard Worker     r->is_a_tlab_ = false;
915*795d594fSAndroid Build Coastguard Worker     r->thread_ = nullptr;
916*795d594fSAndroid Build Coastguard Worker     DCHECK(r->IsAllocated());
917*795d594fSAndroid Build Coastguard Worker     DCHECK_LE(thread->GetThreadLocalBytesAllocated(), kRegionSize);
918*795d594fSAndroid Build Coastguard Worker     r->RecordThreadLocalAllocations(thread->GetThreadLocalObjectsAllocated(),
919*795d594fSAndroid Build Coastguard Worker                                     thread->GetTlabEnd() - r->Begin());
920*795d594fSAndroid Build Coastguard Worker     DCHECK_GE(r->End(), thread->GetTlabPos());
921*795d594fSAndroid Build Coastguard Worker     DCHECK_LE(r->Begin(), thread->GetTlabPos());
922*795d594fSAndroid Build Coastguard Worker     size_t remaining_bytes = r->End() - thread->GetTlabPos();
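    // If reuse is allowed and the unused tail of this TLAB is still large
    // enough, remember it in partial_tlabs_ so AllocNewTlab() can hand the
    // tail out again instead of leaving the space stranded.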
923*795d594fSAndroid Build Coastguard Worker     if (reuse && remaining_bytes >= gc::Heap::kPartialTlabSize) {
924*795d594fSAndroid Build Coastguard Worker       partial_tlabs_.insert(std::make_pair(remaining_bytes, r));
925*795d594fSAndroid Build Coastguard Worker     }
926*795d594fSAndroid Build Coastguard Worker   }
927*795d594fSAndroid Build Coastguard Worker   thread->ResetTlab();
928*795d594fSAndroid Build Coastguard Worker }
929*795d594fSAndroid Build Coastguard Worker 
930*795d594fSAndroid Build Coastguard Worker size_t RegionSpace::RevokeAllThreadLocalBuffers() {
931*795d594fSAndroid Build Coastguard Worker   Thread* self = Thread::Current();
932*795d594fSAndroid Build Coastguard Worker   MutexLock mu(self, *Locks::runtime_shutdown_lock_);
933*795d594fSAndroid Build Coastguard Worker   MutexLock mu2(self, *Locks::thread_list_lock_);
934*795d594fSAndroid Build Coastguard Worker   std::list<Thread*> thread_list = Runtime::Current()->GetThreadList()->GetList();
935*795d594fSAndroid Build Coastguard Worker   for (Thread* thread : thread_list) {
936*795d594fSAndroid Build Coastguard Worker     RevokeThreadLocalBuffers(thread);
937*795d594fSAndroid Build Coastguard Worker   }
938*795d594fSAndroid Build Coastguard Worker   return 0U;
939*795d594fSAndroid Build Coastguard Worker }
940*795d594fSAndroid Build Coastguard Worker 
941*795d594fSAndroid Build Coastguard Worker void RegionSpace::AssertThreadLocalBuffersAreRevoked(Thread* thread) {
942*795d594fSAndroid Build Coastguard Worker   if (kIsDebugBuild) {
943*795d594fSAndroid Build Coastguard Worker     DCHECK(!thread->HasTlab());
944*795d594fSAndroid Build Coastguard Worker   }
945*795d594fSAndroid Build Coastguard Worker }
946*795d594fSAndroid Build Coastguard Worker 
947*795d594fSAndroid Build Coastguard Worker void RegionSpace::AssertAllThreadLocalBuffersAreRevoked() {
948*795d594fSAndroid Build Coastguard Worker   if (kIsDebugBuild) {
949*795d594fSAndroid Build Coastguard Worker     Thread* self = Thread::Current();
950*795d594fSAndroid Build Coastguard Worker     MutexLock mu(self, *Locks::runtime_shutdown_lock_);
951*795d594fSAndroid Build Coastguard Worker     MutexLock mu2(self, *Locks::thread_list_lock_);
952*795d594fSAndroid Build Coastguard Worker     std::list<Thread*> thread_list = Runtime::Current()->GetThreadList()->GetList();
953*795d594fSAndroid Build Coastguard Worker     for (Thread* thread : thread_list) {
954*795d594fSAndroid Build Coastguard Worker       AssertThreadLocalBuffersAreRevoked(thread);
955*795d594fSAndroid Build Coastguard Worker     }
956*795d594fSAndroid Build Coastguard Worker   }
957*795d594fSAndroid Build Coastguard Worker }
958*795d594fSAndroid Build Coastguard Worker 
959*795d594fSAndroid Build Coastguard Worker void RegionSpace::Region::Dump(std::ostream& os) const {
960*795d594fSAndroid Build Coastguard Worker   os << "Region[" << idx_ << "]="
961*795d594fSAndroid Build Coastguard Worker      << reinterpret_cast<void*>(begin_)
962*795d594fSAndroid Build Coastguard Worker      << "-" << reinterpret_cast<void*>(Top())
963*795d594fSAndroid Build Coastguard Worker      << "-" << reinterpret_cast<void*>(end_)
964*795d594fSAndroid Build Coastguard Worker      << " state=" << state_
965*795d594fSAndroid Build Coastguard Worker      << " type=" << type_
966*795d594fSAndroid Build Coastguard Worker      << " objects_allocated=" << objects_allocated_
967*795d594fSAndroid Build Coastguard Worker      << " alloc_time=" << alloc_time_
968*795d594fSAndroid Build Coastguard Worker      << " live_bytes=" << live_bytes_;
969*795d594fSAndroid Build Coastguard Worker 
970*795d594fSAndroid Build Coastguard Worker   if (live_bytes_ != static_cast<size_t>(-1)) {
971*795d594fSAndroid Build Coastguard Worker     os << " ratio over allocated bytes="
972*795d594fSAndroid Build Coastguard Worker        << (static_cast<float>(live_bytes_) / RoundUp(BytesAllocated(), kRegionSize));
973*795d594fSAndroid Build Coastguard Worker     uint64_t longest_consecutive_free_bytes = GetLongestConsecutiveFreeBytes();
974*795d594fSAndroid Build Coastguard Worker     os << " longest_consecutive_free_bytes=" << longest_consecutive_free_bytes
975*795d594fSAndroid Build Coastguard Worker        << " (" << PrettySize(longest_consecutive_free_bytes) << ")";
976*795d594fSAndroid Build Coastguard Worker   }
977*795d594fSAndroid Build Coastguard Worker 
978*795d594fSAndroid Build Coastguard Worker   os << " is_newly_allocated=" << std::boolalpha << is_newly_allocated_ << std::noboolalpha
979*795d594fSAndroid Build Coastguard Worker      << " is_a_tlab=" << std::boolalpha << is_a_tlab_ << std::noboolalpha
980*795d594fSAndroid Build Coastguard Worker      << " thread=" << thread_ << '\n';
981*795d594fSAndroid Build Coastguard Worker }
982*795d594fSAndroid Build Coastguard Worker 
983*795d594fSAndroid Build Coastguard Worker uint64_t RegionSpace::Region::GetLongestConsecutiveFreeBytes() const {
984*795d594fSAndroid Build Coastguard Worker   if (IsFree()) {
985*795d594fSAndroid Build Coastguard Worker     return kRegionSize;
986*795d594fSAndroid Build Coastguard Worker   }
987*795d594fSAndroid Build Coastguard Worker   if (IsLarge() || IsLargeTail()) {
988*795d594fSAndroid Build Coastguard Worker     return 0u;
989*795d594fSAndroid Build Coastguard Worker   }
990*795d594fSAndroid Build Coastguard Worker   uintptr_t max_gap = 0u;
991*795d594fSAndroid Build Coastguard Worker   uintptr_t prev_object_end = reinterpret_cast<uintptr_t>(Begin());
992*795d594fSAndroid Build Coastguard Worker   // Iterate through all live objects and find the largest free gap.
993*795d594fSAndroid Build Coastguard Worker   auto visitor = [&max_gap, &prev_object_end](mirror::Object* obj)
994*795d594fSAndroid Build Coastguard Worker     REQUIRES_SHARED(Locks::mutator_lock_) {
995*795d594fSAndroid Build Coastguard Worker     uintptr_t current = reinterpret_cast<uintptr_t>(obj);
996*795d594fSAndroid Build Coastguard Worker     uintptr_t diff = current - prev_object_end;
997*795d594fSAndroid Build Coastguard Worker     max_gap = std::max(diff, max_gap);
998*795d594fSAndroid Build Coastguard Worker     uintptr_t object_end = reinterpret_cast<uintptr_t>(obj) + obj->SizeOf();
999*795d594fSAndroid Build Coastguard Worker     prev_object_end = RoundUp(object_end, kAlignment);
1000*795d594fSAndroid Build Coastguard Worker   };
1001*795d594fSAndroid Build Coastguard Worker   space::RegionSpace* region_space = art::Runtime::Current()->GetHeap()->GetRegionSpace();
1002*795d594fSAndroid Build Coastguard Worker   region_space->WalkNonLargeRegion(visitor, this);
1003*795d594fSAndroid Build Coastguard Worker   return static_cast<uint64_t>(max_gap);
1004*795d594fSAndroid Build Coastguard Worker }
1005*795d594fSAndroid Build Coastguard Worker 
1006*795d594fSAndroid Build Coastguard Worker 
1007*795d594fSAndroid Build Coastguard Worker size_t RegionSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) {
1008*795d594fSAndroid Build Coastguard Worker   size_t num_bytes = obj->SizeOf();
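  // Usable size is reported at allocation granularity: rounded up to
  // kAlignment for an object inside a normal region, and up to whole regions
  // for a large allocation.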
1009*795d594fSAndroid Build Coastguard Worker   if (usable_size != nullptr) {
1010*795d594fSAndroid Build Coastguard Worker     if (LIKELY(num_bytes <= kRegionSize)) {
1011*795d594fSAndroid Build Coastguard Worker       DCHECK(RefToRegion(obj)->IsAllocated());
1012*795d594fSAndroid Build Coastguard Worker       *usable_size = RoundUp(num_bytes, kAlignment);
1013*795d594fSAndroid Build Coastguard Worker     } else {
1014*795d594fSAndroid Build Coastguard Worker       DCHECK(RefToRegion(obj)->IsLarge());
1015*795d594fSAndroid Build Coastguard Worker       *usable_size = RoundUp(num_bytes, kRegionSize);
1016*795d594fSAndroid Build Coastguard Worker     }
1017*795d594fSAndroid Build Coastguard Worker   }
1018*795d594fSAndroid Build Coastguard Worker   return num_bytes;
1019*795d594fSAndroid Build Coastguard Worker }
1020*795d594fSAndroid Build Coastguard Worker 
1021*795d594fSAndroid Build Coastguard Worker void RegionSpace::Region::Clear(bool zero_and_release_pages) {
1022*795d594fSAndroid Build Coastguard Worker   top_.store(begin_, std::memory_order_relaxed);
1023*795d594fSAndroid Build Coastguard Worker   state_ = RegionState::kRegionStateFree;
1024*795d594fSAndroid Build Coastguard Worker   type_ = RegionType::kRegionTypeNone;
1025*795d594fSAndroid Build Coastguard Worker   objects_allocated_.store(0, std::memory_order_relaxed);
1026*795d594fSAndroid Build Coastguard Worker   alloc_time_ = 0;
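  // A live byte count of -1 marks it as undefined (see
  // CheckLiveBytesAgainstRegionBitmap above).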
1027*795d594fSAndroid Build Coastguard Worker   live_bytes_ = static_cast<size_t>(-1);
1028*795d594fSAndroid Build Coastguard Worker   if (zero_and_release_pages) {
1029*795d594fSAndroid Build Coastguard Worker     ZeroAndProtectRegion(begin_, end_, /* release_eagerly= */ true);
1030*795d594fSAndroid Build Coastguard Worker   }
1031*795d594fSAndroid Build Coastguard Worker   is_newly_allocated_ = false;
1032*795d594fSAndroid Build Coastguard Worker   is_a_tlab_ = false;
1033*795d594fSAndroid Build Coastguard Worker   thread_ = nullptr;
1034*795d594fSAndroid Build Coastguard Worker }
1035*795d594fSAndroid Build Coastguard Worker 
1036*795d594fSAndroid Build Coastguard Worker void RegionSpace::TraceHeapSize() {
1037*795d594fSAndroid Build Coastguard Worker   Heap* heap = Runtime::Current()->GetHeap();
1038*795d594fSAndroid Build Coastguard Worker   heap->TraceHeapSize(heap->GetBytesAllocated() + EvacBytes());
1039*795d594fSAndroid Build Coastguard Worker }
1040*795d594fSAndroid Build Coastguard Worker 
1041*795d594fSAndroid Build Coastguard Worker RegionSpace::Region* RegionSpace::AllocateRegion(bool for_evac) {
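  // Outside of evacuation, keep at least half of the regions in reserve for
  // evacuation: refuse the request if granting one more region would make
  // non-free regions occupy more than half of the space.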
1042*795d594fSAndroid Build Coastguard Worker   if (!for_evac && (num_non_free_regions_ + 1) * 2 > num_regions_) {
1043*795d594fSAndroid Build Coastguard Worker     return nullptr;
1044*795d594fSAndroid Build Coastguard Worker   }
1045*795d594fSAndroid Build Coastguard Worker   for (size_t i = 0; i < num_regions_; ++i) {
1046*795d594fSAndroid Build Coastguard Worker     // When using the cyclic region allocation strategy, try to
1047*795d594fSAndroid Build Coastguard Worker     // allocate a region starting from the last cyclic allocated
1048*795d594fSAndroid Build Coastguard Worker     // region marker. Otherwise, try to allocate a region starting
1049*795d594fSAndroid Build Coastguard Worker     // from the beginning of the region space.
1050*795d594fSAndroid Build Coastguard Worker     size_t region_index = kCyclicRegionAllocation
1051*795d594fSAndroid Build Coastguard Worker         ? ((cyclic_alloc_region_index_ + i) % num_regions_)
1052*795d594fSAndroid Build Coastguard Worker         : i;
1053*795d594fSAndroid Build Coastguard Worker     Region* r = &regions_[region_index];
1054*795d594fSAndroid Build Coastguard Worker     if (r->IsFree()) {
1055*795d594fSAndroid Build Coastguard Worker       r->Unfree(this, time_);
1056*795d594fSAndroid Build Coastguard Worker       if (use_generational_cc_) {
1057*795d594fSAndroid Build Coastguard Worker         // TODO: Add an explanation for this assertion.
1058*795d594fSAndroid Build Coastguard Worker         DCHECK_IMPLIES(for_evac, !r->is_newly_allocated_);
1059*795d594fSAndroid Build Coastguard Worker       }
1060*795d594fSAndroid Build Coastguard Worker       if (for_evac) {
1061*795d594fSAndroid Build Coastguard Worker         ++num_evac_regions_;
1062*795d594fSAndroid Build Coastguard Worker         TraceHeapSize();
1063*795d594fSAndroid Build Coastguard Worker         // Evac doesn't count as newly allocated.
1064*795d594fSAndroid Build Coastguard Worker       } else {
1065*795d594fSAndroid Build Coastguard Worker         r->SetNewlyAllocated();
1066*795d594fSAndroid Build Coastguard Worker         ++num_non_free_regions_;
1067*795d594fSAndroid Build Coastguard Worker       }
1068*795d594fSAndroid Build Coastguard Worker       if (kCyclicRegionAllocation) {
1069*795d594fSAndroid Build Coastguard Worker         // Move the cyclic allocation region marker to the region
1070*795d594fSAndroid Build Coastguard Worker         // following the one that was just allocated.
1071*795d594fSAndroid Build Coastguard Worker         cyclic_alloc_region_index_ = (region_index + 1) % num_regions_;
1072*795d594fSAndroid Build Coastguard Worker       }
1073*795d594fSAndroid Build Coastguard Worker       return r;
1074*795d594fSAndroid Build Coastguard Worker     }
1075*795d594fSAndroid Build Coastguard Worker   }
1076*795d594fSAndroid Build Coastguard Worker   return nullptr;
1077*795d594fSAndroid Build Coastguard Worker }
1078*795d594fSAndroid Build Coastguard Worker 
1079*795d594fSAndroid Build Coastguard Worker void RegionSpace::Region::MarkAsAllocated(RegionSpace* region_space, uint32_t alloc_time) {
1080*795d594fSAndroid Build Coastguard Worker   DCHECK(IsFree());
1081*795d594fSAndroid Build Coastguard Worker   alloc_time_ = alloc_time;
1082*795d594fSAndroid Build Coastguard Worker   region_space->AdjustNonFreeRegionLimit(idx_);
1083*795d594fSAndroid Build Coastguard Worker   type_ = RegionType::kRegionTypeToSpace;
1084*795d594fSAndroid Build Coastguard Worker   if (kProtectClearedRegions) {
1085*795d594fSAndroid Build Coastguard Worker     CheckedCall(mprotect, __FUNCTION__, Begin(), kRegionSize, PROT_READ | PROT_WRITE);
1086*795d594fSAndroid Build Coastguard Worker   }
1087*795d594fSAndroid Build Coastguard Worker }
1088*795d594fSAndroid Build Coastguard Worker 
1089*795d594fSAndroid Build Coastguard Worker void RegionSpace::Region::Unfree(RegionSpace* region_space, uint32_t alloc_time) {
1090*795d594fSAndroid Build Coastguard Worker   MarkAsAllocated(region_space, alloc_time);
1091*795d594fSAndroid Build Coastguard Worker   state_ = RegionState::kRegionStateAllocated;
1092*795d594fSAndroid Build Coastguard Worker }
1093*795d594fSAndroid Build Coastguard Worker 
1094*795d594fSAndroid Build Coastguard Worker void RegionSpace::Region::UnfreeLarge(RegionSpace* region_space, uint32_t alloc_time) {
1095*795d594fSAndroid Build Coastguard Worker   MarkAsAllocated(region_space, alloc_time);
1096*795d594fSAndroid Build Coastguard Worker   state_ = RegionState::kRegionStateLarge;
1097*795d594fSAndroid Build Coastguard Worker }
1098*795d594fSAndroid Build Coastguard Worker 
1099*795d594fSAndroid Build Coastguard Worker void RegionSpace::Region::UnfreeLargeTail(RegionSpace* region_space, uint32_t alloc_time) {
1100*795d594fSAndroid Build Coastguard Worker   MarkAsAllocated(region_space, alloc_time);
1101*795d594fSAndroid Build Coastguard Worker   state_ = RegionState::kRegionStateLargeTail;
1102*795d594fSAndroid Build Coastguard Worker }
1103*795d594fSAndroid Build Coastguard Worker 
1104*795d594fSAndroid Build Coastguard Worker }  // namespace space
1105*795d594fSAndroid Build Coastguard Worker }  // namespace gc
1106*795d594fSAndroid Build Coastguard Worker }  // namespace art
1107