/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_INL_H_
#define ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_INL_H_

#include "concurrent_copying.h"

#include "gc/accounting/atomic_stack.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/space/region_space-inl.h"
#include "gc/verification.h"
#include "lock_word.h"
#include "mirror/class.h"
#include "mirror/object-readbarrier-inl.h"

namespace art HIDDEN {
namespace gc {
namespace collector {

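// Marks a reference found in an unevacuated from-space region. Such objects are marked in place,
// either by graying them (Baker read barrier) or by setting their bit in the mark bitmap, rather
// than by copying them to the to-space.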
inline mirror::Object* ConcurrentCopying::MarkUnevacFromSpaceRegion(
    Thread* const self,
    mirror::Object* ref,
    accounting::ContinuousSpaceBitmap* bitmap) {
  if (use_generational_cc_ && !done_scanning_.load(std::memory_order_acquire)) {
    // Everything in the unevac space should be marked for young generation CC,
    // except for large objects.
    DCHECK(!young_gen_ || region_space_bitmap_->Test(ref) || region_space_->IsLargeObject(ref))
        << ref << " "
        << ref->GetClass<kVerifyNone, kWithoutReadBarrier>()->PrettyClass();
    // Since the mark bitmap is still filled in from the last GC (or from the marking phase of
    // 2-phase CC), we cannot use it, or else the mutator may see references to the from-space.
    // Instead, use the Baker pointer itself as the mark bit.
    if (ref->AtomicSetReadBarrierState(ReadBarrier::NonGrayState(), ReadBarrier::GrayState())) {
      // TODO: We don't actually need to scan this object later, we just need to clear the gray
      // bit.
      // TODO: We could also set the mark bit here for "free" since this case comes from the
      // read barrier.
      PushOntoMarkStack(self, ref);
    }
    DCHECK_EQ(ref->GetReadBarrierState(), ReadBarrier::GrayState());
    return ref;
  }
  // For the Baker-style RB, in a rare case, we could incorrectly change the object from non-gray
  // (black) to gray even though the object has already been marked through. This happens if a
  // mutator thread gets preempted before the AtomicSetReadBarrierState below, GC marks through
  // the object (changes it from non-gray (white) to gray and back to non-gray (black)), and the
  // thread runs and incorrectly changes it from non-gray (black) to gray. If this happens, the
  // object will get added to the mark stack again and get changed back to non-gray (black) after
  // it is processed.
  if (kUseBakerReadBarrier) {
    // Test the bitmap first to avoid graying an object that has already been marked through most
    // of the time.
    if (bitmap->Test(ref)) {
      return ref;
    }
  }
  // This may or may not succeed, which is ok because the object may already be gray.
  bool success = false;
  if (kUseBakerReadBarrier) {
    // GC will mark the bitmap when popping from the mark stack. If only the GC is touching the
    // bitmap, we can avoid an expensive CAS.
    // For the Baker case, an object is marked if either the mark bit is set or the bitmap bit is
    // set.
    success = ref->AtomicSetReadBarrierState(/* expected_rb_state= */ ReadBarrier::NonGrayState(),
                                             /* rb_state= */ ReadBarrier::GrayState());
  } else {
    success = !bitmap->AtomicTestAndSet(ref);
  }
  if (success) {
    // Newly marked.
    if (kUseBakerReadBarrier) {
      DCHECK_EQ(ref->GetReadBarrierState(), ReadBarrier::GrayState());
    }
    PushOntoMarkStack(self, ref);
  }
  return ref;
}

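// Marks a reference in an immune space. Immune-space objects are never moved; with the Baker read
// barrier they are grayed and recorded on the immune gray stack so the GC can scan them later.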
template<bool kGrayImmuneObject>
inline mirror::Object* ConcurrentCopying::MarkImmuneSpace(Thread* const self,
                                                          mirror::Object* ref) {
  if (kUseBakerReadBarrier) {
    // The GC-running thread doesn't (need to) gray immune objects except when updating thread
    // roots in the thread flip on behalf of suspended threads (when gc_grays_immune_objects_ is
    // true). Also, a mutator doesn't (need to) gray an immune object after GC has updated all
    // immune space objects (when updated_all_immune_objects_ is true).
    if (kIsDebugBuild) {
      if (self == thread_running_gc_) {
        DCHECK(!kGrayImmuneObject ||
               updated_all_immune_objects_.load(std::memory_order_relaxed) ||
               gc_grays_immune_objects_);
      } else {
        DCHECK(kGrayImmuneObject);
      }
    }
    if (!kGrayImmuneObject || updated_all_immune_objects_.load(std::memory_order_relaxed)) {
      return ref;
    }
    // This may or may not succeed, which is ok because the object may already be gray.
    bool success =
        ref->AtomicSetReadBarrierState(/* expected_rb_state= */ ReadBarrier::NonGrayState(),
                                       /* rb_state= */ ReadBarrier::GrayState());
    if (success) {
      MutexLock mu(self, immune_gray_stack_lock_);
      immune_gray_stack_.push_back(ref);
    }
  }
  return ref;
}

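// Central marking routine: returns the marked (possibly relocated) version of `from_ref`,
// dispatching on where the reference lives: to-space, from-space, unevacuated from-space, an
// immune space, or the non-moving space. `holder` and `offset` identify the field the reference
// was read from and are used for diagnostics if heap corruption is detected.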
template<bool kGrayImmuneObject, bool kNoUnEvac, bool kFromGCThread>
inline mirror::Object* ConcurrentCopying::Mark(Thread* const self,
                                               mirror::Object* from_ref,
                                               mirror::Object* holder,
                                               MemberOffset offset) {
  // Cannot have `kNoUnEvac` when Generational CC collection is disabled.
  DCHECK_IMPLIES(kNoUnEvac, use_generational_cc_);
  if (from_ref == nullptr) {
    return nullptr;
  }
  DCHECK(heap_->collector_type_ == kCollectorTypeCC);
  if (kFromGCThread) {
    DCHECK(is_active_);
    DCHECK_EQ(self, thread_running_gc_);
  } else if (UNLIKELY(kUseBakerReadBarrier && !is_active_)) {
    // In the lock word forward address state, the read barrier bits
    // in the lock word are part of the stored forwarding address and
    // invalid. This is usually OK as from-space copies of objects
    // aren't accessed by mutators due to the to-space
    // invariant. However, during the dex2oat image writing relocation
    // and the zygote compaction, objects can be in the forward
    // address state (to store the forward/relocation addresses) and
    // they can still be accessed and the invalid read barrier bits
    // are consulted. If they look gray but aren't really, the
    // read barrier slow path can trigger when it shouldn't. To guard
    // against this, return here if the CC collector isn't running.
    return from_ref;
  }
  DCHECK(region_space_ != nullptr) << "Read barrier slow path taken when CC isn't running?";
  if (region_space_->HasAddress(from_ref)) {
    space::RegionSpace::RegionType rtype = region_space_->GetRegionTypeUnsafe(from_ref);
    switch (rtype) {
      case space::RegionSpace::RegionType::kRegionTypeToSpace:
        // It's already marked.
        return from_ref;
      case space::RegionSpace::RegionType::kRegionTypeFromSpace: {
        mirror::Object* to_ref = GetFwdPtr(from_ref);
        if (to_ref == nullptr) {
          // It isn't marked yet. Mark it by copying it to the to-space.
          to_ref = Copy(self, from_ref, holder, offset);
        }
        // The copy should either be in a to-space region, or in the
        // non-moving space, if it could not fit in a to-space region.
        DCHECK(region_space_->IsInToSpace(to_ref) || heap_->non_moving_space_->HasAddress(to_ref))
            << "from_ref=" << from_ref << " to_ref=" << to_ref;
        return to_ref;
      }
      case space::RegionSpace::RegionType::kRegionTypeUnevacFromSpace:
        if (kNoUnEvac && use_generational_cc_ && !region_space_->IsLargeObject(from_ref)) {
          if (!kFromGCThread) {
            DCHECK(IsMarkedInUnevacFromSpace(from_ref)) << "Returning unmarked object to mutator";
          }
          return from_ref;
        }
        return MarkUnevacFromSpaceRegion(self, from_ref, region_space_bitmap_);
      default:
        // The reference is in an unused region. Remove memory protection from
        // the region space and log debugging information.
        region_space_->Unprotect();
        LOG(FATAL_WITHOUT_ABORT) << DumpHeapReference(holder, offset, from_ref);
        region_space_->DumpNonFreeRegions(LOG_STREAM(FATAL_WITHOUT_ABORT));
        heap_->GetVerification()->LogHeapCorruption(holder, offset, from_ref, /* fatal= */ true);
        UNREACHABLE();
    }
  } else {
    if (immune_spaces_.ContainsObject(from_ref)) {
      return MarkImmuneSpace<kGrayImmuneObject>(self, from_ref);
    } else {
      return MarkNonMoving(self, from_ref, holder, offset);
    }
  }
}

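// Read barrier slow path: marks `from_ref` on behalf of a mutator. For the Baker barrier, newly
// marked objects are also recorded on the mark-bit stack (falling back to clearing the mark bit
// again if the stack is full).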
inline mirror::Object* ConcurrentCopying::MarkFromReadBarrier(mirror::Object* from_ref) {
  mirror::Object* ret;
  Thread* const self = Thread::Current();
  // We can get here before marking starts since we gray immune objects before the marking phase.
  if (from_ref == nullptr || !self->GetIsGcMarking()) {
    return from_ref;
  }
  // TODO: Consider removing this check when we are done investigating slow paths. b/30162165
  if (UNLIKELY(mark_from_read_barrier_measurements_)) {
    ret = MarkFromReadBarrierWithMeasurements(self, from_ref);
  } else {
    ret = Mark</*kGrayImmuneObject=*/true, /*kNoUnEvac=*/false, /*kFromGCThread=*/false>(self,
                                                                                         from_ref);
  }
  // Only set the mark bit for the Baker barrier.
  if (kUseBakerReadBarrier && LIKELY(!rb_mark_bit_stack_full_ && ret->AtomicSetMarkBit(0, 1))) {
    // If the mark stack is full, we may temporarily go to marked and back to unmarked. Seeing
    // either value is OK since the only race is doing an unnecessary Mark.
    if (!rb_mark_bit_stack_->AtomicPushBack(ret)) {
      // Mark stack is full, set the bit back to zero.
      CHECK(ret->AtomicSetMarkBit(1, 0));
      // Set rb_mark_bit_stack_full_; this is racy but OK since AtomicPushBack is thread-safe.
      rb_mark_bit_stack_full_ = true;
    }
  }
  return ret;
}

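// Returns the forwarding address stored in `from_ref`'s lock word, or null if the object has not
// been forwarded. Does not check which space `from_ref` is in.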
inline mirror::Object* ConcurrentCopying::GetFwdPtrUnchecked(mirror::Object* from_ref) {
  LockWord lw = from_ref->GetLockWord(false);
  if (lw.GetState() == LockWord::kForwardingAddress) {
    mirror::Object* fwd_ptr = reinterpret_cast<mirror::Object*>(lw.ForwardingAddress());
    DCHECK(fwd_ptr != nullptr);
    return fwd_ptr;
  } else {
    return nullptr;
  }
}

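// Same as GetFwdPtrUnchecked, but asserts that `from_ref` is in the from-space.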
inline mirror::Object* ConcurrentCopying::GetFwdPtr(mirror::Object* from_ref) {
  DCHECK(region_space_->IsInFromSpace(from_ref));
  return GetFwdPtrUnchecked(from_ref);
}

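// Returns true if an object in an unevacuated from-space region is considered marked: either its
// read-barrier state is gray, or (once card-table scanning is complete) its bit is set in the
// region space mark bitmap.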
inline bool ConcurrentCopying::IsMarkedInUnevacFromSpace(mirror::Object* from_ref) {
  // Use load-acquire on the read barrier pointer to ensure that we never see a black (non-gray)
  // read barrier state with an unmarked bit due to reordering.
  DCHECK(region_space_->IsInUnevacFromSpace(from_ref));
  if (kUseBakerReadBarrier && from_ref->GetReadBarrierStateAcquire() == ReadBarrier::GrayState()) {
    return true;
  } else if (!use_generational_cc_ || done_scanning_.load(std::memory_order_acquire)) {
    // If card-table scanning is not finished yet, only the read-barrier state
    // should be checked. Checking the mark bitmap is unreliable, as there may
    // be objects (with a dirty corresponding card) that are marked in the
    // bitmap but cannot be considered marked unless their read-barrier state
    // is gray.
    //
    // Why read the read-barrier state before checking done_scanning_?
    // If the read-barrier state were read *after* done_scanning_, there would
    // be a race in which this function returns false even though the object
    // has been marked. The following scenario may cause the race:
    //
    // 1. Mutator thread reads done_scanning_ and upon finding it false, gets
    //    suspended before reading the object's read-barrier state.
    // 2. GC thread finishes card-table scan and then sets done_scanning_ to
    //    true.
    // 3. GC thread grays the object, scans it, marks in the bitmap, and then
    //    changes its read-barrier state back to non-gray.
    // 4. Mutator thread resumes, reads the object's read-barrier state and
    //    returns false.
    return region_space_bitmap_->Test(from_ref);
  }
  return false;
}

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_INL_H_