1*795d594fSAndroid Build Coastguard Worker /*
2*795d594fSAndroid Build Coastguard Worker * Copyright (C) 2014 The Android Open Source Project
3*795d594fSAndroid Build Coastguard Worker *
4*795d594fSAndroid Build Coastguard Worker * Licensed under the Apache License, Version 2.0 (the "License");
5*795d594fSAndroid Build Coastguard Worker * you may not use this file except in compliance with the License.
6*795d594fSAndroid Build Coastguard Worker * You may obtain a copy of the License at
7*795d594fSAndroid Build Coastguard Worker *
8*795d594fSAndroid Build Coastguard Worker * http://www.apache.org/licenses/LICENSE-2.0
9*795d594fSAndroid Build Coastguard Worker *
10*795d594fSAndroid Build Coastguard Worker * Unless required by applicable law or agreed to in writing, software
11*795d594fSAndroid Build Coastguard Worker * distributed under the License is distributed on an "AS IS" BASIS,
12*795d594fSAndroid Build Coastguard Worker * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13*795d594fSAndroid Build Coastguard Worker * See the License for the specific language governing permissions and
14*795d594fSAndroid Build Coastguard Worker * limitations under the License.
15*795d594fSAndroid Build Coastguard Worker */
16*795d594fSAndroid Build Coastguard Worker
17*795d594fSAndroid Build Coastguard Worker #ifndef ART_RUNTIME_GC_SPACE_REGION_SPACE_INL_H_
18*795d594fSAndroid Build Coastguard Worker #define ART_RUNTIME_GC_SPACE_REGION_SPACE_INL_H_
19*795d594fSAndroid Build Coastguard Worker
20*795d594fSAndroid Build Coastguard Worker #include "base/mutex-inl.h"
21*795d594fSAndroid Build Coastguard Worker #include "mirror/object-inl.h"
22*795d594fSAndroid Build Coastguard Worker #include "region_space.h"
23*795d594fSAndroid Build Coastguard Worker #include "thread-current-inl.h"
24*795d594fSAndroid Build Coastguard Worker
25*795d594fSAndroid Build Coastguard Worker namespace art HIDDEN {
26*795d594fSAndroid Build Coastguard Worker namespace gc {
27*795d594fSAndroid Build Coastguard Worker namespace space {
28*795d594fSAndroid Build Coastguard Worker
Alloc(Thread * self,size_t num_bytes,size_t * bytes_allocated,size_t * usable_size,size_t * bytes_tl_bulk_allocated)29*795d594fSAndroid Build Coastguard Worker inline mirror::Object* RegionSpace::Alloc([[maybe_unused]] Thread* self,
30*795d594fSAndroid Build Coastguard Worker size_t num_bytes,
31*795d594fSAndroid Build Coastguard Worker /* out */ size_t* bytes_allocated,
32*795d594fSAndroid Build Coastguard Worker /* out */ size_t* usable_size,
33*795d594fSAndroid Build Coastguard Worker /* out */ size_t* bytes_tl_bulk_allocated) {
34*795d594fSAndroid Build Coastguard Worker num_bytes = RoundUp(num_bytes, kAlignment);
35*795d594fSAndroid Build Coastguard Worker return AllocNonvirtual<false>(num_bytes, bytes_allocated, usable_size,
36*795d594fSAndroid Build Coastguard Worker bytes_tl_bulk_allocated);
37*795d594fSAndroid Build Coastguard Worker }
38*795d594fSAndroid Build Coastguard Worker
AllocThreadUnsafe(Thread * self,size_t num_bytes,size_t * bytes_allocated,size_t * usable_size,size_t * bytes_tl_bulk_allocated)39*795d594fSAndroid Build Coastguard Worker inline mirror::Object* RegionSpace::AllocThreadUnsafe(Thread* self,
40*795d594fSAndroid Build Coastguard Worker size_t num_bytes,
41*795d594fSAndroid Build Coastguard Worker /* out */ size_t* bytes_allocated,
42*795d594fSAndroid Build Coastguard Worker /* out */ size_t* usable_size,
43*795d594fSAndroid Build Coastguard Worker /* out */ size_t* bytes_tl_bulk_allocated) {
44*795d594fSAndroid Build Coastguard Worker Locks::mutator_lock_->AssertExclusiveHeld(self);
45*795d594fSAndroid Build Coastguard Worker return Alloc(self, num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
46*795d594fSAndroid Build Coastguard Worker }
47*795d594fSAndroid Build Coastguard Worker
// Allocate `num_bytes` (already kAlignment-aligned) from the current mutator
// region, or from the evacuation region when kForEvac is true. Falls back to
// taking region_lock_ and allocating a fresh region when the current one is
// full. Returns nullptr when no region can be allocated (or a large-object
// allocation fails).
template<bool kForEvac>
inline mirror::Object* RegionSpace::AllocNonvirtual(size_t num_bytes,
                                                    /* out */ size_t* bytes_allocated,
                                                    /* out */ size_t* usable_size,
                                                    /* out */ size_t* bytes_tl_bulk_allocated) {
  DCHECK_ALIGNED(num_bytes, kAlignment);
  mirror::Object* obj;
  if (LIKELY(num_bytes <= kRegionSize)) {
    // Non-large object.
    // Lock-free fast path: bump-pointer allocate from the active region.
    obj = (kForEvac ? evac_region_ : current_region_)->Alloc(num_bytes,
                                                             bytes_allocated,
                                                             usable_size,
                                                             bytes_tl_bulk_allocated);
    if (LIKELY(obj != nullptr)) {
      return obj;
    }
    MutexLock mu(Thread::Current(), region_lock_);
    // Retry with current region since another thread may have updated
    // current_region_ or evac_region_. TODO: fix race.
    obj = (kForEvac ? evac_region_ : current_region_)->Alloc(num_bytes,
                                                             bytes_allocated,
                                                             usable_size,
                                                             bytes_tl_bulk_allocated);
    if (LIKELY(obj != nullptr)) {
      return obj;
    }
    // Still full under the lock: carve out a brand-new region.
    Region* r = AllocateRegion(kForEvac);
    if (LIKELY(r != nullptr)) {
      obj = r->Alloc(num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
      // A fresh, empty region must be able to satisfy a <= kRegionSize request.
      CHECK(obj != nullptr);
      // Do our allocation before setting the region, this makes sure no threads race ahead
      // and fill in the region before we allocate the object. b/63153464
      if (kForEvac) {
        evac_region_ = r;
      } else {
        current_region_ = r;
      }
      return obj;
    }
  } else {
    // Large object.
    obj = AllocLarge<kForEvac>(num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
    if (LIKELY(obj != nullptr)) {
      return obj;
    }
  }
  // Out of memory in this space.
  return nullptr;
}
96*795d594fSAndroid Build Coastguard Worker
// Lock-free bump-pointer allocation within a single region. Atomically
// advances top_ by num_bytes via a CAS loop; returns nullptr when the region
// does not have num_bytes of space left. On success all three out-params are
// set to num_bytes.
inline mirror::Object* RegionSpace::Region::Alloc(size_t num_bytes,
                                                  /* out */ size_t* bytes_allocated,
                                                  /* out */ size_t* usable_size,
                                                  /* out */ size_t* bytes_tl_bulk_allocated) {
  DCHECK(IsAllocated() && IsInToSpace());
  DCHECK_ALIGNED(num_bytes, kAlignment);
  uint8_t* old_top;
  uint8_t* new_top;
  do {
    old_top = top_.load(std::memory_order_relaxed);
    new_top = old_top + num_bytes;
    // Out of space in this region; caller will fall back to the slow path.
    if (UNLIKELY(new_top > end_)) {
      return nullptr;
    }
    // Weak CAS may fail spuriously or because another thread raced us; retry.
  } while (!top_.CompareAndSetWeakRelaxed(old_top, new_top));
  objects_allocated_.fetch_add(1, std::memory_order_relaxed);
  DCHECK_LE(Top(), end_);
  DCHECK_LT(old_top, end_);
  DCHECK_LE(new_top, end_);
  *bytes_allocated = num_bytes;
  if (usable_size != nullptr) {
    *usable_size = num_bytes;
  }
  *bytes_tl_bulk_allocated = num_bytes;
  // The object lives at the pre-increment top.
  return reinterpret_cast<mirror::Object*>(old_top);
}
123*795d594fSAndroid Build Coastguard Worker
124*795d594fSAndroid Build Coastguard Worker template<RegionSpace::RegionType kRegionType>
GetBytesAllocatedInternal()125*795d594fSAndroid Build Coastguard Worker inline uint64_t RegionSpace::GetBytesAllocatedInternal() {
126*795d594fSAndroid Build Coastguard Worker uint64_t bytes = 0;
127*795d594fSAndroid Build Coastguard Worker MutexLock mu(Thread::Current(), region_lock_);
128*795d594fSAndroid Build Coastguard Worker for (size_t i = 0; i < num_regions_; ++i) {
129*795d594fSAndroid Build Coastguard Worker Region* r = ®ions_[i];
130*795d594fSAndroid Build Coastguard Worker if (r->IsFree()) {
131*795d594fSAndroid Build Coastguard Worker continue;
132*795d594fSAndroid Build Coastguard Worker }
133*795d594fSAndroid Build Coastguard Worker switch (kRegionType) {
134*795d594fSAndroid Build Coastguard Worker case RegionType::kRegionTypeAll:
135*795d594fSAndroid Build Coastguard Worker bytes += r->BytesAllocated();
136*795d594fSAndroid Build Coastguard Worker break;
137*795d594fSAndroid Build Coastguard Worker case RegionType::kRegionTypeFromSpace:
138*795d594fSAndroid Build Coastguard Worker if (r->IsInFromSpace()) {
139*795d594fSAndroid Build Coastguard Worker bytes += r->BytesAllocated();
140*795d594fSAndroid Build Coastguard Worker }
141*795d594fSAndroid Build Coastguard Worker break;
142*795d594fSAndroid Build Coastguard Worker case RegionType::kRegionTypeUnevacFromSpace:
143*795d594fSAndroid Build Coastguard Worker if (r->IsInUnevacFromSpace()) {
144*795d594fSAndroid Build Coastguard Worker bytes += r->BytesAllocated();
145*795d594fSAndroid Build Coastguard Worker }
146*795d594fSAndroid Build Coastguard Worker break;
147*795d594fSAndroid Build Coastguard Worker case RegionType::kRegionTypeToSpace:
148*795d594fSAndroid Build Coastguard Worker if (r->IsInToSpace()) {
149*795d594fSAndroid Build Coastguard Worker bytes += r->BytesAllocated();
150*795d594fSAndroid Build Coastguard Worker }
151*795d594fSAndroid Build Coastguard Worker break;
152*795d594fSAndroid Build Coastguard Worker default:
153*795d594fSAndroid Build Coastguard Worker LOG(FATAL) << "Unexpected space type : " << kRegionType;
154*795d594fSAndroid Build Coastguard Worker }
155*795d594fSAndroid Build Coastguard Worker }
156*795d594fSAndroid Build Coastguard Worker return bytes;
157*795d594fSAndroid Build Coastguard Worker }
158*795d594fSAndroid Build Coastguard Worker
159*795d594fSAndroid Build Coastguard Worker template<RegionSpace::RegionType kRegionType>
GetObjectsAllocatedInternal()160*795d594fSAndroid Build Coastguard Worker inline uint64_t RegionSpace::GetObjectsAllocatedInternal() {
161*795d594fSAndroid Build Coastguard Worker uint64_t bytes = 0;
162*795d594fSAndroid Build Coastguard Worker MutexLock mu(Thread::Current(), region_lock_);
163*795d594fSAndroid Build Coastguard Worker for (size_t i = 0; i < num_regions_; ++i) {
164*795d594fSAndroid Build Coastguard Worker Region* r = ®ions_[i];
165*795d594fSAndroid Build Coastguard Worker if (r->IsFree()) {
166*795d594fSAndroid Build Coastguard Worker continue;
167*795d594fSAndroid Build Coastguard Worker }
168*795d594fSAndroid Build Coastguard Worker switch (kRegionType) {
169*795d594fSAndroid Build Coastguard Worker case RegionType::kRegionTypeAll:
170*795d594fSAndroid Build Coastguard Worker bytes += r->ObjectsAllocated();
171*795d594fSAndroid Build Coastguard Worker break;
172*795d594fSAndroid Build Coastguard Worker case RegionType::kRegionTypeFromSpace:
173*795d594fSAndroid Build Coastguard Worker if (r->IsInFromSpace()) {
174*795d594fSAndroid Build Coastguard Worker bytes += r->ObjectsAllocated();
175*795d594fSAndroid Build Coastguard Worker }
176*795d594fSAndroid Build Coastguard Worker break;
177*795d594fSAndroid Build Coastguard Worker case RegionType::kRegionTypeUnevacFromSpace:
178*795d594fSAndroid Build Coastguard Worker if (r->IsInUnevacFromSpace()) {
179*795d594fSAndroid Build Coastguard Worker bytes += r->ObjectsAllocated();
180*795d594fSAndroid Build Coastguard Worker }
181*795d594fSAndroid Build Coastguard Worker break;
182*795d594fSAndroid Build Coastguard Worker case RegionType::kRegionTypeToSpace:
183*795d594fSAndroid Build Coastguard Worker if (r->IsInToSpace()) {
184*795d594fSAndroid Build Coastguard Worker bytes += r->ObjectsAllocated();
185*795d594fSAndroid Build Coastguard Worker }
186*795d594fSAndroid Build Coastguard Worker break;
187*795d594fSAndroid Build Coastguard Worker default:
188*795d594fSAndroid Build Coastguard Worker LOG(FATAL) << "Unexpected space type : " << kRegionType;
189*795d594fSAndroid Build Coastguard Worker }
190*795d594fSAndroid Build Coastguard Worker }
191*795d594fSAndroid Build Coastguard Worker return bytes;
192*795d594fSAndroid Build Coastguard Worker }
193*795d594fSAndroid Build Coastguard Worker
194*795d594fSAndroid Build Coastguard Worker template <typename Visitor>
ScanUnevacFromSpace(accounting::ContinuousSpaceBitmap * bitmap,Visitor && visitor)195*795d594fSAndroid Build Coastguard Worker inline void RegionSpace::ScanUnevacFromSpace(accounting::ContinuousSpaceBitmap* bitmap,
196*795d594fSAndroid Build Coastguard Worker Visitor&& visitor) {
197*795d594fSAndroid Build Coastguard Worker const size_t iter_limit = kUseTableLookupReadBarrier
198*795d594fSAndroid Build Coastguard Worker ? num_regions_ : std::min(num_regions_, non_free_region_index_limit_);
199*795d594fSAndroid Build Coastguard Worker // Instead of region-wise scan, find contiguous blocks of un-evac regions and then
200*795d594fSAndroid Build Coastguard Worker // visit them. Everything before visit_block_begin has been processed, while
201*795d594fSAndroid Build Coastguard Worker // [visit_block_begin, visit_block_end) still needs to be visited.
202*795d594fSAndroid Build Coastguard Worker uint8_t* visit_block_begin = nullptr;
203*795d594fSAndroid Build Coastguard Worker uint8_t* visit_block_end = nullptr;
204*795d594fSAndroid Build Coastguard Worker for (size_t i = 0; i < iter_limit; ++i) {
205*795d594fSAndroid Build Coastguard Worker Region* r = ®ions_[i];
206*795d594fSAndroid Build Coastguard Worker if (r->IsInUnevacFromSpace()) {
207*795d594fSAndroid Build Coastguard Worker // visit_block_begin set to nullptr means a new visit block needs to be stated.
208*795d594fSAndroid Build Coastguard Worker if (visit_block_begin == nullptr) {
209*795d594fSAndroid Build Coastguard Worker visit_block_begin = r->Begin();
210*795d594fSAndroid Build Coastguard Worker }
211*795d594fSAndroid Build Coastguard Worker visit_block_end = r->End();
212*795d594fSAndroid Build Coastguard Worker } else if (visit_block_begin != nullptr) {
213*795d594fSAndroid Build Coastguard Worker // Visit the block range as r is not adjacent to current visit block.
214*795d594fSAndroid Build Coastguard Worker bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(visit_block_begin),
215*795d594fSAndroid Build Coastguard Worker reinterpret_cast<uintptr_t>(visit_block_end),
216*795d594fSAndroid Build Coastguard Worker visitor);
217*795d594fSAndroid Build Coastguard Worker visit_block_begin = nullptr;
218*795d594fSAndroid Build Coastguard Worker }
219*795d594fSAndroid Build Coastguard Worker }
220*795d594fSAndroid Build Coastguard Worker // Visit last block, if not processed yet.
221*795d594fSAndroid Build Coastguard Worker if (visit_block_begin != nullptr) {
222*795d594fSAndroid Build Coastguard Worker bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(visit_block_begin),
223*795d594fSAndroid Build Coastguard Worker reinterpret_cast<uintptr_t>(visit_block_end),
224*795d594fSAndroid Build Coastguard Worker visitor);
225*795d594fSAndroid Build Coastguard Worker }
226*795d594fSAndroid Build Coastguard Worker }
227*795d594fSAndroid Build Coastguard Worker
228*795d594fSAndroid Build Coastguard Worker template<bool kToSpaceOnly, typename Visitor>
WalkInternal(Visitor && visitor)229*795d594fSAndroid Build Coastguard Worker inline void RegionSpace::WalkInternal(Visitor&& visitor) {
230*795d594fSAndroid Build Coastguard Worker // TODO: MutexLock on region_lock_ won't work due to lock order
231*795d594fSAndroid Build Coastguard Worker // issues (the classloader classes lock and the monitor lock). We
232*795d594fSAndroid Build Coastguard Worker // call this with threads suspended.
233*795d594fSAndroid Build Coastguard Worker Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
234*795d594fSAndroid Build Coastguard Worker for (size_t i = 0; i < num_regions_; ++i) {
235*795d594fSAndroid Build Coastguard Worker Region* r = ®ions_[i];
236*795d594fSAndroid Build Coastguard Worker if (r->IsFree() || (kToSpaceOnly && !r->IsInToSpace())) {
237*795d594fSAndroid Build Coastguard Worker continue;
238*795d594fSAndroid Build Coastguard Worker }
239*795d594fSAndroid Build Coastguard Worker if (r->IsLarge()) {
240*795d594fSAndroid Build Coastguard Worker // We may visit a large object with live_bytes = 0 here. However, it is
241*795d594fSAndroid Build Coastguard Worker // safe as it cannot contain dangling pointers because corresponding regions
242*795d594fSAndroid Build Coastguard Worker // (and regions corresponding to dead referents) cannot be allocated for new
243*795d594fSAndroid Build Coastguard Worker // allocations without first clearing regions' live_bytes and state.
244*795d594fSAndroid Build Coastguard Worker mirror::Object* obj = reinterpret_cast<mirror::Object*>(r->Begin());
245*795d594fSAndroid Build Coastguard Worker DCHECK(obj->GetClass() != nullptr);
246*795d594fSAndroid Build Coastguard Worker visitor(obj);
247*795d594fSAndroid Build Coastguard Worker } else if (r->IsLargeTail()) {
248*795d594fSAndroid Build Coastguard Worker // Do nothing.
249*795d594fSAndroid Build Coastguard Worker } else {
250*795d594fSAndroid Build Coastguard Worker WalkNonLargeRegion(visitor, r);
251*795d594fSAndroid Build Coastguard Worker }
252*795d594fSAndroid Build Coastguard Worker }
253*795d594fSAndroid Build Coastguard Worker }
254*795d594fSAndroid Build Coastguard Worker
// Visit every live object in a single non-large region `r`. Chooses between a
// bitmap-driven walk (when some allocated objects may be dead) and a faster
// linear bump-pointer walk (when every allocated byte is known live).
template<typename Visitor>
inline void RegionSpace::WalkNonLargeRegion(Visitor&& visitor, const Region* r) {
  DCHECK(!r->IsLarge() && !r->IsLargeTail());
  // For newly allocated and evacuated regions, live bytes will be -1.
  uint8_t* pos = r->Begin();
  uint8_t* top = r->Top();
  // We need the region space bitmap to iterate over a region's objects
  // if
  // - its live bytes count is invalid (i.e. -1); or
  // - its live bytes count is lower than the allocated bytes count.
  //
  // In both of the previous cases, we do not have the guarantee that
  // all allocated objects are "alive" (i.e. valid), so we depend on
  // the region space bitmap to identify which ones to visit.
  //
  // On the other hand, when all allocated bytes are known to be alive,
  // we know that they form a range of consecutive objects (modulo
  // object alignment constraints) that can be visited iteratively: we
  // can compute the next object's location by using the current
  // object's address and size (and object alignment constraints).
  const bool need_bitmap =
      r->LiveBytes() != static_cast<size_t>(-1) &&
      r->LiveBytes() != static_cast<size_t>(top - pos);
  if (need_bitmap) {
    GetLiveBitmap()->VisitMarkedRange(
        reinterpret_cast<uintptr_t>(pos),
        reinterpret_cast<uintptr_t>(top),
        visitor);
  } else {
    // Linear walk: advance object by object until top, stopping early at the
    // first slot with a null class (no further objects were allocated).
    while (pos < top) {
      mirror::Object* obj = reinterpret_cast<mirror::Object*>(pos);
      if (obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
        visitor(obj);
        pos = reinterpret_cast<uint8_t*>(GetNextObject(obj));
      } else {
        break;
      }
    }
  }
}
295*795d594fSAndroid Build Coastguard Worker
296*795d594fSAndroid Build Coastguard Worker template <typename Visitor>
Walk(Visitor && visitor)297*795d594fSAndroid Build Coastguard Worker inline void RegionSpace::Walk(Visitor&& visitor) {
298*795d594fSAndroid Build Coastguard Worker WalkInternal</* kToSpaceOnly= */ false>(visitor);
299*795d594fSAndroid Build Coastguard Worker }
300*795d594fSAndroid Build Coastguard Worker template <typename Visitor>
WalkToSpace(Visitor && visitor)301*795d594fSAndroid Build Coastguard Worker inline void RegionSpace::WalkToSpace(Visitor&& visitor) {
302*795d594fSAndroid Build Coastguard Worker WalkInternal</* kToSpaceOnly= */ true>(visitor);
303*795d594fSAndroid Build Coastguard Worker }
304*795d594fSAndroid Build Coastguard Worker
GetNextObject(mirror::Object * obj)305*795d594fSAndroid Build Coastguard Worker inline mirror::Object* RegionSpace::GetNextObject(mirror::Object* obj) {
306*795d594fSAndroid Build Coastguard Worker const uintptr_t position = reinterpret_cast<uintptr_t>(obj) + obj->SizeOf();
307*795d594fSAndroid Build Coastguard Worker return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment));
308*795d594fSAndroid Build Coastguard Worker }
309*795d594fSAndroid Build Coastguard Worker
// Allocate a "large" object (> kRegionSize) by claiming a contiguous run of
// free regions under region_lock_. With kCyclicRegionAllocation, the search
// starts at a rotating index to spread large allocations across the space.
// Returns nullptr when no suitable run of free regions exists (or, for
// non-evac allocations, when it would break the full-evacuation reserve).
template<bool kForEvac>
inline mirror::Object* RegionSpace::AllocLarge(size_t num_bytes,
                                               /* out */ size_t* bytes_allocated,
                                               /* out */ size_t* usable_size,
                                               /* out */ size_t* bytes_tl_bulk_allocated) {
  DCHECK_ALIGNED(num_bytes, kAlignment);
  DCHECK_GT(num_bytes, kRegionSize);
  // Number of whole regions needed to cover num_bytes.
  size_t num_regs_in_large_region = RoundUp(num_bytes, kRegionSize) / kRegionSize;
  DCHECK_GT(num_regs_in_large_region, 0U);
  DCHECK_LT((num_regs_in_large_region - 1) * kRegionSize, num_bytes);
  DCHECK_LE(num_bytes, num_regs_in_large_region * kRegionSize);
  MutexLock mu(Thread::Current(), region_lock_);
  if (!kForEvac) {
    // Retain sufficient free regions for full evacuation.
    if ((num_non_free_regions_ + num_regs_in_large_region) * 2 > num_regions_) {
      return nullptr;
    }
  }

  mirror::Object* region = nullptr;
  // Find a large enough set of contiguous free regions.
  if (kCyclicRegionAllocation) {
    // Index of the region just past the allocated run; -1 means "not set".
    size_t next_region = -1;
    // Try to find a range of free regions within [cyclic_alloc_region_index_, num_regions_).
    region = AllocLargeInRange<kForEvac>(cyclic_alloc_region_index_,
                                         num_regions_,
                                         num_regs_in_large_region,
                                         bytes_allocated,
                                         usable_size,
                                         bytes_tl_bulk_allocated,
                                         &next_region);

    if (region == nullptr) {
      DCHECK_EQ(next_region, static_cast<size_t>(-1));
      // If the previous attempt failed, try to find a range of free regions within
      // [0, min(cyclic_alloc_region_index_ + num_regs_in_large_region - 1, num_regions_)).
      region = AllocLargeInRange<kForEvac>(
          0,
          std::min(cyclic_alloc_region_index_ + num_regs_in_large_region - 1, num_regions_),
          num_regs_in_large_region,
          bytes_allocated,
          usable_size,
          bytes_tl_bulk_allocated,
          &next_region);
    }

    if (region != nullptr) {
      DCHECK_LT(0u, next_region);
      DCHECK_LE(next_region, num_regions_);
      // Move the cyclic allocation region marker to the region
      // following the large region that was just allocated.
      cyclic_alloc_region_index_ = next_region % num_regions_;
    }
  } else {
    // Try to find a range of free regions within [0, num_regions_).
    region = AllocLargeInRange<kForEvac>(0,
                                         num_regions_,
                                         num_regs_in_large_region,
                                         bytes_allocated,
                                         usable_size,
                                         bytes_tl_bulk_allocated);
  }
  if (kForEvac && region != nullptr) {
    TraceHeapSize();
  }
  return region;
}
377*795d594fSAndroid Build Coastguard Worker
378*795d594fSAndroid Build Coastguard Worker template<bool kForEvac>
AllocLargeInRange(size_t begin,size_t end,size_t num_regs_in_large_region,size_t * bytes_allocated,size_t * usable_size,size_t * bytes_tl_bulk_allocated,size_t * next_region)379*795d594fSAndroid Build Coastguard Worker inline mirror::Object* RegionSpace::AllocLargeInRange(size_t begin,
380*795d594fSAndroid Build Coastguard Worker size_t end,
381*795d594fSAndroid Build Coastguard Worker size_t num_regs_in_large_region,
382*795d594fSAndroid Build Coastguard Worker /* out */ size_t* bytes_allocated,
383*795d594fSAndroid Build Coastguard Worker /* out */ size_t* usable_size,
384*795d594fSAndroid Build Coastguard Worker /* out */ size_t* bytes_tl_bulk_allocated,
385*795d594fSAndroid Build Coastguard Worker /* out */ size_t* next_region) {
386*795d594fSAndroid Build Coastguard Worker DCHECK_LE(0u, begin);
387*795d594fSAndroid Build Coastguard Worker DCHECK_LT(begin, end);
388*795d594fSAndroid Build Coastguard Worker DCHECK_LE(end, num_regions_);
389*795d594fSAndroid Build Coastguard Worker size_t left = begin;
390*795d594fSAndroid Build Coastguard Worker while (left + num_regs_in_large_region - 1 < end) {
391*795d594fSAndroid Build Coastguard Worker bool found = true;
392*795d594fSAndroid Build Coastguard Worker size_t right = left;
393*795d594fSAndroid Build Coastguard Worker DCHECK_LT(right, left + num_regs_in_large_region)
394*795d594fSAndroid Build Coastguard Worker << "The inner loop should iterate at least once";
395*795d594fSAndroid Build Coastguard Worker while (right < left + num_regs_in_large_region) {
396*795d594fSAndroid Build Coastguard Worker if (regions_[right].IsFree()) {
397*795d594fSAndroid Build Coastguard Worker ++right;
398*795d594fSAndroid Build Coastguard Worker // Ensure `right` is not going beyond the past-the-end index of the region space.
399*795d594fSAndroid Build Coastguard Worker DCHECK_LE(right, num_regions_);
400*795d594fSAndroid Build Coastguard Worker } else {
401*795d594fSAndroid Build Coastguard Worker found = false;
402*795d594fSAndroid Build Coastguard Worker break;
403*795d594fSAndroid Build Coastguard Worker }
404*795d594fSAndroid Build Coastguard Worker }
405*795d594fSAndroid Build Coastguard Worker if (found) {
406*795d594fSAndroid Build Coastguard Worker // `right` points to the one region past the last free region.
407*795d594fSAndroid Build Coastguard Worker DCHECK_EQ(left + num_regs_in_large_region, right);
408*795d594fSAndroid Build Coastguard Worker Region* first_reg = ®ions_[left];
409*795d594fSAndroid Build Coastguard Worker DCHECK(first_reg->IsFree());
410*795d594fSAndroid Build Coastguard Worker first_reg->UnfreeLarge(this, time_);
411*795d594fSAndroid Build Coastguard Worker if (kForEvac) {
412*795d594fSAndroid Build Coastguard Worker ++num_evac_regions_;
413*795d594fSAndroid Build Coastguard Worker } else {
414*795d594fSAndroid Build Coastguard Worker ++num_non_free_regions_;
415*795d594fSAndroid Build Coastguard Worker }
416*795d594fSAndroid Build Coastguard Worker size_t allocated = num_regs_in_large_region * kRegionSize;
417*795d594fSAndroid Build Coastguard Worker // We make 'top' all usable bytes, as the caller of this
418*795d594fSAndroid Build Coastguard Worker // allocation may use all of 'usable_size' (see mirror::Array::Alloc).
419*795d594fSAndroid Build Coastguard Worker first_reg->SetTop(first_reg->Begin() + allocated);
420*795d594fSAndroid Build Coastguard Worker if (!kForEvac) {
421*795d594fSAndroid Build Coastguard Worker // Evac doesn't count as newly allocated.
422*795d594fSAndroid Build Coastguard Worker first_reg->SetNewlyAllocated();
423*795d594fSAndroid Build Coastguard Worker }
424*795d594fSAndroid Build Coastguard Worker for (size_t p = left + 1; p < right; ++p) {
425*795d594fSAndroid Build Coastguard Worker DCHECK_LT(p, num_regions_);
426*795d594fSAndroid Build Coastguard Worker DCHECK(regions_[p].IsFree());
427*795d594fSAndroid Build Coastguard Worker regions_[p].UnfreeLargeTail(this, time_);
428*795d594fSAndroid Build Coastguard Worker if (kForEvac) {
429*795d594fSAndroid Build Coastguard Worker ++num_evac_regions_;
430*795d594fSAndroid Build Coastguard Worker } else {
431*795d594fSAndroid Build Coastguard Worker ++num_non_free_regions_;
432*795d594fSAndroid Build Coastguard Worker }
433*795d594fSAndroid Build Coastguard Worker if (!kForEvac) {
434*795d594fSAndroid Build Coastguard Worker // Evac doesn't count as newly allocated.
435*795d594fSAndroid Build Coastguard Worker regions_[p].SetNewlyAllocated();
436*795d594fSAndroid Build Coastguard Worker }
437*795d594fSAndroid Build Coastguard Worker }
438*795d594fSAndroid Build Coastguard Worker *bytes_allocated = allocated;
439*795d594fSAndroid Build Coastguard Worker if (usable_size != nullptr) {
440*795d594fSAndroid Build Coastguard Worker *usable_size = allocated;
441*795d594fSAndroid Build Coastguard Worker }
442*795d594fSAndroid Build Coastguard Worker *bytes_tl_bulk_allocated = allocated;
443*795d594fSAndroid Build Coastguard Worker mirror::Object* large_region = reinterpret_cast<mirror::Object*>(first_reg->Begin());
444*795d594fSAndroid Build Coastguard Worker DCHECK(large_region != nullptr);
445*795d594fSAndroid Build Coastguard Worker if (next_region != nullptr) {
446*795d594fSAndroid Build Coastguard Worker // Return the index to the region next to the allocated large region via `next_region`.
447*795d594fSAndroid Build Coastguard Worker *next_region = right;
448*795d594fSAndroid Build Coastguard Worker }
449*795d594fSAndroid Build Coastguard Worker return large_region;
450*795d594fSAndroid Build Coastguard Worker } else {
451*795d594fSAndroid Build Coastguard Worker // `right` points to the non-free region. Start with the one after it.
452*795d594fSAndroid Build Coastguard Worker left = right + 1;
453*795d594fSAndroid Build Coastguard Worker }
454*795d594fSAndroid Build Coastguard Worker }
455*795d594fSAndroid Build Coastguard Worker return nullptr;
456*795d594fSAndroid Build Coastguard Worker }
457*795d594fSAndroid Build Coastguard Worker
458*795d594fSAndroid Build Coastguard Worker template<bool kForEvac>
FreeLarge(mirror::Object * large_obj,size_t bytes_allocated)459*795d594fSAndroid Build Coastguard Worker inline void RegionSpace::FreeLarge(mirror::Object* large_obj, size_t bytes_allocated) {
460*795d594fSAndroid Build Coastguard Worker DCHECK(Contains(large_obj));
461*795d594fSAndroid Build Coastguard Worker DCHECK_ALIGNED(large_obj, kRegionSize);
462*795d594fSAndroid Build Coastguard Worker MutexLock mu(Thread::Current(), region_lock_);
463*795d594fSAndroid Build Coastguard Worker uint8_t* begin_addr = reinterpret_cast<uint8_t*>(large_obj);
464*795d594fSAndroid Build Coastguard Worker uint8_t* end_addr = AlignUp(reinterpret_cast<uint8_t*>(large_obj) + bytes_allocated, kRegionSize);
465*795d594fSAndroid Build Coastguard Worker CHECK_LT(begin_addr, end_addr);
466*795d594fSAndroid Build Coastguard Worker for (uint8_t* addr = begin_addr; addr < end_addr; addr += kRegionSize) {
467*795d594fSAndroid Build Coastguard Worker Region* reg = RefToRegionLocked(reinterpret_cast<mirror::Object*>(addr));
468*795d594fSAndroid Build Coastguard Worker if (addr == begin_addr) {
469*795d594fSAndroid Build Coastguard Worker DCHECK(reg->IsLarge());
470*795d594fSAndroid Build Coastguard Worker } else {
471*795d594fSAndroid Build Coastguard Worker DCHECK(reg->IsLargeTail());
472*795d594fSAndroid Build Coastguard Worker }
473*795d594fSAndroid Build Coastguard Worker reg->Clear(/*zero_and_release_pages=*/true);
474*795d594fSAndroid Build Coastguard Worker if (kForEvac) {
475*795d594fSAndroid Build Coastguard Worker --num_evac_regions_;
476*795d594fSAndroid Build Coastguard Worker } else {
477*795d594fSAndroid Build Coastguard Worker --num_non_free_regions_;
478*795d594fSAndroid Build Coastguard Worker }
479*795d594fSAndroid Build Coastguard Worker }
480*795d594fSAndroid Build Coastguard Worker if (kIsDebugBuild && end_addr < Limit()) {
481*795d594fSAndroid Build Coastguard Worker // If we aren't at the end of the space, check that the next region is not a large tail.
482*795d594fSAndroid Build Coastguard Worker Region* following_reg = RefToRegionLocked(reinterpret_cast<mirror::Object*>(end_addr));
483*795d594fSAndroid Build Coastguard Worker DCHECK(!following_reg->IsLargeTail());
484*795d594fSAndroid Build Coastguard Worker }
485*795d594fSAndroid Build Coastguard Worker }
486*795d594fSAndroid Build Coastguard Worker
BytesAllocated()487*795d594fSAndroid Build Coastguard Worker inline size_t RegionSpace::Region::BytesAllocated() const {
488*795d594fSAndroid Build Coastguard Worker if (IsLarge()) {
489*795d594fSAndroid Build Coastguard Worker DCHECK_LT(begin_ + kRegionSize, Top());
490*795d594fSAndroid Build Coastguard Worker return static_cast<size_t>(Top() - begin_);
491*795d594fSAndroid Build Coastguard Worker } else if (IsLargeTail()) {
492*795d594fSAndroid Build Coastguard Worker DCHECK_EQ(begin_, Top());
493*795d594fSAndroid Build Coastguard Worker return 0;
494*795d594fSAndroid Build Coastguard Worker } else {
495*795d594fSAndroid Build Coastguard Worker DCHECK(IsAllocated()) << "state=" << state_;
496*795d594fSAndroid Build Coastguard Worker DCHECK_LE(begin_, Top());
497*795d594fSAndroid Build Coastguard Worker size_t bytes;
498*795d594fSAndroid Build Coastguard Worker if (is_a_tlab_) {
499*795d594fSAndroid Build Coastguard Worker bytes = thread_->GetTlabEnd() - begin_;
500*795d594fSAndroid Build Coastguard Worker } else {
501*795d594fSAndroid Build Coastguard Worker bytes = static_cast<size_t>(Top() - begin_);
502*795d594fSAndroid Build Coastguard Worker }
503*795d594fSAndroid Build Coastguard Worker DCHECK_LE(bytes, kRegionSize);
504*795d594fSAndroid Build Coastguard Worker return bytes;
505*795d594fSAndroid Build Coastguard Worker }
506*795d594fSAndroid Build Coastguard Worker }
507*795d594fSAndroid Build Coastguard Worker
ObjectsAllocated()508*795d594fSAndroid Build Coastguard Worker inline size_t RegionSpace::Region::ObjectsAllocated() const {
509*795d594fSAndroid Build Coastguard Worker if (IsLarge()) {
510*795d594fSAndroid Build Coastguard Worker DCHECK_LT(begin_ + kRegionSize, Top());
511*795d594fSAndroid Build Coastguard Worker DCHECK_EQ(objects_allocated_.load(std::memory_order_relaxed), 0U);
512*795d594fSAndroid Build Coastguard Worker return 1;
513*795d594fSAndroid Build Coastguard Worker } else if (IsLargeTail()) {
514*795d594fSAndroid Build Coastguard Worker DCHECK_EQ(begin_, Top());
515*795d594fSAndroid Build Coastguard Worker DCHECK_EQ(objects_allocated_.load(std::memory_order_relaxed), 0U);
516*795d594fSAndroid Build Coastguard Worker return 0;
517*795d594fSAndroid Build Coastguard Worker } else {
518*795d594fSAndroid Build Coastguard Worker DCHECK(IsAllocated()) << "state=" << state_;
519*795d594fSAndroid Build Coastguard Worker return objects_allocated_;
520*795d594fSAndroid Build Coastguard Worker }
521*795d594fSAndroid Build Coastguard Worker }
522*795d594fSAndroid Build Coastguard Worker
523*795d594fSAndroid Build Coastguard Worker } // namespace space
524*795d594fSAndroid Build Coastguard Worker } // namespace gc
525*795d594fSAndroid Build Coastguard Worker } // namespace art
526*795d594fSAndroid Build Coastguard Worker
527*795d594fSAndroid Build Coastguard Worker #endif // ART_RUNTIME_GC_SPACE_REGION_SPACE_INL_H_
528