/*
 * Copyright 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_INL_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_INL_H_

#include "gc/space/bump_pointer_space.h"
#include "mark_compact.h"
#include "mirror/object-inl.h"
#include "thread-inl.h"

namespace art HIDDEN {
namespace gc {
namespace collector {

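// Record classes that lie at a higher address than (at least one of) their
// instances in the moving space. The map is keyed by class, and the value is
// kept at the lowest-addressed such instance seen so far, so that the
// compaction pass can detect when a class may be slid over before the last
// of its instances has been visited.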
inline void MarkCompact::UpdateClassAfterObjectMap(mirror::Object* obj) {
  mirror::Class* klass = obj->GetClass<kVerifyNone, kWithoutReadBarrier>();
  if (UNLIKELY(std::less<mirror::Object*>{}(obj, klass) && HasAddress(klass))) {
    auto [iter, success] = class_after_obj_map_.try_emplace(ObjReference::FromMirrorPtr(klass),
                                                            ObjReference::FromMirrorPtr(obj));
    if (!success && std::less<mirror::Object*>{}(obj, iter->second.AsMirrorPtr())) {
      iter->second = ObjReference::FromMirrorPtr(obj);
    }
  }
}

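// Set the live-words bitmap bits covering [begin, begin + size) and return
// the bit index corresponding to 'begin'. Each bit represents kAlignment
// bytes; for example, a 48-byte object with kAlignment == 8 sets 6
// consecutive bits. The first and last bitmap words are or-ed with masks;
// fully covered words in between are filled with memset().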
template <size_t kAlignment>
inline uintptr_t MarkCompact::LiveWordsBitmap<kAlignment>::SetLiveWords(uintptr_t begin,
                                                                        size_t size) {
  const uintptr_t begin_bit_idx = MemRangeBitmap::BitIndexFromAddr(begin);
  DCHECK(!Bitmap::TestBit(begin_bit_idx));
  // Range of bits to set: [begin, end].
  uintptr_t end = begin + size - kAlignment;
  const uintptr_t end_bit_idx = MemRangeBitmap::BitIndexFromAddr(end);
  uintptr_t* begin_bm_address = Bitmap::Begin() + Bitmap::BitIndexToWordIndex(begin_bit_idx);
  uintptr_t* end_bm_address = Bitmap::Begin() + Bitmap::BitIndexToWordIndex(end_bit_idx);
  ptrdiff_t diff = end_bm_address - begin_bm_address;
  uintptr_t mask = Bitmap::BitIndexToMask(begin_bit_idx);
  // Bits that need to be set in the first word, if it's not also the last word.
  mask = ~(mask - 1);
  if (diff > 0) {
    *begin_bm_address |= mask;
    mask = ~0;
    // memset() could handle the (diff == 1) case as well, but we avoid the
    // overhead of a function call for this highly likely case (most objects
    // are small).
    if (diff > 1) {
      // Set all intermediate bits to 1.
      std::memset(static_cast<void*>(begin_bm_address + 1), 0xff, (diff - 1) * sizeof(uintptr_t));
    }
  }
  uintptr_t end_mask = Bitmap::BitIndexToMask(end_bit_idx);
  *end_bm_address |= mask & (end_mask | (end_mask - 1));
  return begin_bit_idx;
}

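// Visit contiguous runs ("strides") of set bits starting at begin_bit_idx and
// bounded above by 'end', visiting at most bytes / kAlignment live words in
// total. For every stride the visitor is called as
//   visitor(stride_begin_bit_idx, stride_size_in_words, is_last);
// the final stride is reported with is_last == true and its size clamped to
// the remaining word budget.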
template <size_t kAlignment> template <typename Visitor>
inline void MarkCompact::LiveWordsBitmap<kAlignment>::VisitLiveStrides(uintptr_t begin_bit_idx,
                                                                       uint8_t* end,
                                                                       const size_t bytes,
                                                                       Visitor&& visitor) const {
  // Range of bits to visit: [begin_bit_idx, end_bit_idx].
  DCHECK(IsAligned<kAlignment>(end));
  end -= kAlignment;
  const uintptr_t end_bit_idx = MemRangeBitmap::BitIndexFromAddr(reinterpret_cast<uintptr_t>(end));
  DCHECK_LE(begin_bit_idx, end_bit_idx);
  uintptr_t begin_word_idx = Bitmap::BitIndexToWordIndex(begin_bit_idx);
  const uintptr_t end_word_idx = Bitmap::BitIndexToWordIndex(end_bit_idx);
  DCHECK(Bitmap::TestBit(begin_bit_idx));
  size_t stride_size = 0;
  size_t idx_in_word = 0;
  size_t num_heap_words = bytes / kAlignment;
  uintptr_t live_stride_start_idx;
  uintptr_t word = Bitmap::Begin()[begin_word_idx];

  // Set up the first word: clear the bits below begin_bit_idx.
  word &= ~(Bitmap::BitIndexToMask(begin_bit_idx) - 1);
  begin_bit_idx = RoundDown(begin_bit_idx, Bitmap::kBitsPerBitmapWord);

  do {
    if (UNLIKELY(begin_word_idx == end_word_idx)) {
      uintptr_t mask = Bitmap::BitIndexToMask(end_bit_idx);
      word &= mask | (mask - 1);
    }
    if (~word == 0) {
      // All bits in the word are marked.
      if (stride_size == 0) {
        live_stride_start_idx = begin_bit_idx;
      }
      stride_size += Bitmap::kBitsPerBitmapWord;
      if (num_heap_words <= stride_size) {
        break;
      }
    } else {
      while (word != 0) {
        // Discard 0s.
        size_t shift = CTZ(word);
        idx_in_word += shift;
        word >>= shift;
        if (stride_size > 0) {
          if (shift > 0) {
            if (num_heap_words <= stride_size) {
              break;
            }
            visitor(live_stride_start_idx, stride_size, /*is_last*/ false);
            num_heap_words -= stride_size;
            live_stride_start_idx = begin_bit_idx + idx_in_word;
            stride_size = 0;
          }
        } else {
          live_stride_start_idx = begin_bit_idx + idx_in_word;
        }
        // Consume 1s.
        shift = CTZ(~word);
        DCHECK_NE(shift, 0u);
        word >>= shift;
        idx_in_word += shift;
        stride_size += shift;
      }
      // If the whole word is 0, or its higher bits are 0s, then we exit the
      // above loop without completely consuming the word, so call the
      // visitor, if needed.
      if (idx_in_word < Bitmap::kBitsPerBitmapWord && stride_size > 0) {
        if (num_heap_words <= stride_size) {
          break;
        }
        visitor(live_stride_start_idx, stride_size, /*is_last*/ false);
        num_heap_words -= stride_size;
        stride_size = 0;
      }
      idx_in_word = 0;
    }
    begin_bit_idx += Bitmap::kBitsPerBitmapWord;
    begin_word_idx++;
    if (UNLIKELY(begin_word_idx > end_word_idx)) {
      num_heap_words = std::min(stride_size, num_heap_words);
      break;
    }
    word = Bitmap::Begin()[begin_word_idx];
  } while (true);

  if (stride_size > 0) {
    visitor(live_stride_start_idx, num_heap_words, /*is_last*/ true);
  }
}

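// Return the bit offset, within the chunk 'chunk_idx', of the n-th
// (0-indexed) set bit, i.e. the offset in kAlignment-sized units of the n-th
// live heap word in that chunk. A chunk spans kBitmapWordsPerVectorWord
// bitmap words.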
template <size_t kAlignment>
inline
uint32_t MarkCompact::LiveWordsBitmap<kAlignment>::FindNthLiveWordOffset(size_t chunk_idx,
                                                                         uint32_t n) const {
  DCHECK_LT(n, kBitsPerVectorWord);
  const size_t index = chunk_idx * kBitmapWordsPerVectorWord;
  for (uint32_t i = 0; i < kBitmapWordsPerVectorWord; i++) {
    uintptr_t word = Bitmap::Begin()[index + i];
    if (~word == 0) {
      // All bits in this word are set.
      if (n < Bitmap::kBitsPerBitmapWord) {
        return i * Bitmap::kBitsPerBitmapWord + n;
      }
      n -= Bitmap::kBitsPerBitmapWord;
    } else {
      uint32_t j = 0;
      while (word != 0) {
        // Count contiguous 0s.
        uint32_t shift = CTZ(word);
        word >>= shift;
        j += shift;
        // Count contiguous 1s.
        shift = CTZ(~word);
        DCHECK_NE(shift, 0u);
        if (shift > n) {
          return i * Bitmap::kBitsPerBitmapWord + j + n;
        }
        n -= shift;
        word >>= shift;
        j += shift;
      }
    }
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

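// Return whether 'ref' is still on the heap's allocation stack, which holds
// objects allocated since the stack was last processed; such objects can be
// treated as live without consulting the mark bitmap. The acquire fence makes
// pushes performed by allocating threads visible to this thread.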
inline bool MarkCompact::IsOnAllocStack(mirror::Object* ref) {
  // Pairs with release fence after allocation-stack push in
  // Heap::AllocObjectWithAllocator().
  std::atomic_thread_fence(std::memory_order_acquire);
  accounting::ObjectStack* stack = heap_->GetAllocationStack();
  return stack->Contains(ref);
}

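// Load the reference stored at 'offset' in 'obj' and, if it points into
// [begin, end), overwrite it with the post-compaction address. The store
// intentionally skips the write barrier. In debug builds, a reference into
// the moving space below the black-allocations boundary must be marked in the
// moving-space bitmap; otherwise we dump the spaces and abort.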
inline void MarkCompact::UpdateRef(mirror::Object* obj,
                                   MemberOffset offset,
                                   uint8_t* begin,
                                   uint8_t* end) {
  mirror::Object* old_ref = obj->GetFieldObject<
      mirror::Object, kVerifyNone, kWithoutReadBarrier, /*kIsVolatile*/false>(offset);
  if (kIsDebugBuild) {
    if (HasAddress(old_ref) &&
        reinterpret_cast<uint8_t*>(old_ref) < black_allocations_begin_ &&
        !moving_space_bitmap_->Test(old_ref)) {
      mirror::Object* from_ref = GetFromSpaceAddr(old_ref);
      std::ostringstream oss;
      heap_->DumpSpaces(oss);
      MemMap::DumpMaps(oss, /* terse= */ true);
      LOG(FATAL) << "Not marked in the bitmap ref=" << old_ref
                 << " from_ref=" << from_ref
                 << " offset=" << offset
                 << " obj=" << obj
                 << " obj-validity=" << IsValidObject(obj)
                 << " from-space=" << static_cast<void*>(from_space_begin_)
                 << " bitmap= " << moving_space_bitmap_->DumpMemAround(old_ref)
                 << " from_ref "
                 << heap_->GetVerification()->DumpRAMAroundAddress(
                        reinterpret_cast<uintptr_t>(from_ref), 128)
                 << " obj "
                 << heap_->GetVerification()->DumpRAMAroundAddress(
                        reinterpret_cast<uintptr_t>(obj), 128)
                 << " old_ref " << heap_->GetVerification()->DumpRAMAroundAddress(
                        reinterpret_cast<uintptr_t>(old_ref), 128)
                 << " maps\n" << oss.str();
    }
  }
  mirror::Object* new_ref = PostCompactAddress(old_ref, begin, end);
  if (new_ref != old_ref) {
    obj->SetFieldObjectWithoutWriteBarrier<
        /*kTransactionActive*/false, /*kCheckTransaction*/false, kVerifyNone, /*kIsVolatile*/false>(
            offset,
            new_ref);
  }
}

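// Debug-build verification that every root not residing on the current
// thread's stack is updated at most once; aborts with a dump on a repeated
// update. Returns whether the caller should proceed with the update: in debug
// builds (when no sanitizer interferes) roots pointing outside the moving
// space return false, as they need no update; otherwise returns true.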
inline bool MarkCompact::VerifyRootSingleUpdate(void* root,
                                                mirror::Object* old_ref,
                                                const RootInfo& info) {
  // ASan promotes stack frames to the heap in order to detect
  // stack-use-after-return issues, and HWASan tags pointers, which makes it
  // difficult to recognize stack pointers and exclude them from this check.
  // So skip the double-root-update detection when those tools are enabled.
  if (kIsDebugBuild && !kMemoryToolIsAvailable && !kHwAsanEnabled) {
    void* stack_low_addr = stack_low_addr_;
    void* stack_high_addr = stack_high_addr_;
    if (!HasAddress(old_ref)) {
      return false;
    }
    Thread* self = Thread::Current();
    if (UNLIKELY(stack_low_addr == nullptr)) {
      // TODO(Simulator): Test that this should not operate on the simulated
      // stack when the simulator supports mark-compact.
      stack_low_addr = self->GetStackEnd<kNativeStackType>();
      stack_high_addr = reinterpret_cast<char*>(stack_low_addr)
                        + self->GetUsableStackSize<kNativeStackType>();
    }
    if (std::less<void*>{}(root, stack_low_addr) || std::greater<void*>{}(root, stack_high_addr)) {
      bool inserted;
      {
        MutexLock mu(self, lock_);
        inserted = updated_roots_->insert(root).second;
      }
      if (!inserted) {
        std::ostringstream oss;
        heap_->DumpSpaces(oss);
        MemMap::DumpMaps(oss, /* terse= */ true);
        CHECK(inserted) << "root=" << root << " old_ref=" << old_ref
                        << " stack_low_addr=" << stack_low_addr
                        << " stack_high_addr=" << stack_high_addr << " maps\n"
                        << oss.str();
      }
    }
    DCHECK(reinterpret_cast<uint8_t*>(old_ref) >= black_allocations_begin_ ||
           live_words_bitmap_->Test(old_ref))
        << "ref=" << old_ref << " <" << mirror::Object::PrettyTypeOf(old_ref) << "> RootInfo ["
        << info << "]";
  }
  return true;
}

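// Rewrite a GC root to its post-compaction address. Overloads cover
// compressed references and raw mirror::Object* slots; the slot is only
// written when the address actually changes.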
inline void MarkCompact::UpdateRoot(mirror::CompressedReference<mirror::Object>* root,
                                    uint8_t* begin,
                                    uint8_t* end,
                                    const RootInfo& info) {
  DCHECK(!root->IsNull());
  mirror::Object* old_ref = root->AsMirrorPtr();
  if (VerifyRootSingleUpdate(root, old_ref, info)) {
    mirror::Object* new_ref = PostCompactAddress(old_ref, begin, end);
    if (old_ref != new_ref) {
      root->Assign(new_ref);
    }
  }
}

inline void MarkCompact::UpdateRoot(mirror::Object** root,
                                    uint8_t* begin,
                                    uint8_t* end,
                                    const RootInfo& info) {
  mirror::Object* old_ref = *root;
  if (VerifyRootSingleUpdate(root, old_ref, info)) {
    mirror::Object* new_ref = PostCompactAddress(old_ref, begin, end);
    if (old_ref != new_ref) {
      *root = new_ref;
    }
  }
}

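// Count the set bits preceding 'bit_idx' within its chunk; the bit at
// 'bit_idx' itself is expected to be set. This is a POPCOUNT of the current
// bitmap word masked below the bit, plus whole-word POPCOUNTs for any earlier
// bitmap words of the same chunk.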
template <size_t kAlignment>
inline size_t MarkCompact::LiveWordsBitmap<kAlignment>::CountLiveWordsUpto(size_t bit_idx) const {
  const size_t word_offset = Bitmap::BitIndexToWordIndex(bit_idx);
  uintptr_t word;
  size_t ret = 0;
  // This loop is needed only if we decide to make chunks 128-bit while still
  // using 64-bit words for the bitmap. Ideally we should use 128-bit SIMD
  // instructions to compute the popcount.
  if (kBitmapWordsPerVectorWord > 1) {
    for (size_t i = RoundDown(word_offset, kBitmapWordsPerVectorWord); i < word_offset; i++) {
      word = Bitmap::Begin()[i];
      ret += POPCOUNT(word);
    }
  }
  word = Bitmap::Begin()[word_offset];
  const uintptr_t mask = Bitmap::BitIndexToMask(bit_idx);
  DCHECK_NE(word & mask, 0u)
      << " word_offset:" << word_offset
      << " bit_idx:" << bit_idx
      << " bit_idx_in_word:" << (bit_idx % Bitmap::kBitsPerBitmapWord)
      << std::hex << " word: 0x" << word
      << " mask: 0x" << mask << std::dec;
  ret += POPCOUNT(word & (mask - 1));
  return ret;
}

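// Post-compaction address of a "black" object, i.e. one allocated after
// marking started. All black objects move down by the same
// black_objs_slide_diff_ bytes, so the new address is a fixed subtraction.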
inline mirror::Object* MarkCompact::PostCompactBlackObjAddr(mirror::Object* old_ref) const {
  return reinterpret_cast<mirror::Object*>(reinterpret_cast<uint8_t*>(old_ref)
                                           - black_objs_slide_diff_);
}

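// Post-compaction address of an object allocated before marking started:
// moving-space begin, plus the compacted bytes of all preceding chunks
// (chunk_info_vec_ holds per-chunk offsets into the compacted space by the
// time this runs), plus the live bytes preceding the object within its own
// chunk.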
inline mirror::Object* MarkCompact::PostCompactOldObjAddr(mirror::Object* old_ref) const {
  const uintptr_t begin = live_words_bitmap_->Begin();
  const uintptr_t addr_offset = reinterpret_cast<uintptr_t>(old_ref) - begin;
  const size_t vec_idx = addr_offset / kOffsetChunkSize;
  const size_t live_bytes_in_bitmap_word =
      live_words_bitmap_->CountLiveWordsUpto(addr_offset / kAlignment) * kAlignment;
  return reinterpret_cast<mirror::Object*>(begin
                                           + chunk_info_vec_[vec_idx]
                                           + live_bytes_in_bitmap_word);
}

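// Post-compaction address of a reference already known to be in the moving
// space: black objects use the fixed slide, older objects the chunk-based
// computation above. Debug builds verify that an old object is marked in both
// the live-words and moving-space bitmaps.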
inline mirror::Object* MarkCompact::PostCompactAddressUnchecked(mirror::Object* old_ref) const {
  if (reinterpret_cast<uint8_t*>(old_ref) >= black_allocations_begin_) {
    return PostCompactBlackObjAddr(old_ref);
  }
  if (kIsDebugBuild) {
    mirror::Object* from_ref = GetFromSpaceAddr(old_ref);
    DCHECK(live_words_bitmap_->Test(old_ref))
        << "ref=" << old_ref;
    if (!moving_space_bitmap_->Test(old_ref)) {
      std::ostringstream oss;
      Runtime::Current()->GetHeap()->DumpSpaces(oss);
      MemMap::DumpMaps(oss, /* terse= */ true);
      LOG(FATAL) << "ref=" << old_ref
                 << " from_ref=" << from_ref
                 << " from-space=" << static_cast<void*>(from_space_begin_)
                 << " bitmap= " << moving_space_bitmap_->DumpMemAround(old_ref)
                 << heap_->GetVerification()->DumpRAMAroundAddress(
                        reinterpret_cast<uintptr_t>(from_ref), 128)
                 << " maps\n" << oss.str();
    }
  }
  return PostCompactOldObjAddr(old_ref);
}

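// Post-compaction address of 'old_ref' if it lies within [begin, end) (the
// moving space); references outside that range are returned unchanged.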
inline mirror::Object* MarkCompact::PostCompactAddress(mirror::Object* old_ref,
                                                       uint8_t* begin,
                                                       uint8_t* end) const {
  if (LIKELY(HasAddress(old_ref, begin, end))) {
    return PostCompactAddressUnchecked(old_ref);
  }
  return old_ref;
}

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_INL_H_