/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_
#define ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_

#include "base/macros.h"
#include "garbage_collector.h"
#include "gc/accounting/space_bitmap.h"
#include "immune_spaces.h"
#include "offsets.h"

#include <map>
#include <memory>
#include <unordered_map>
#include <vector>

namespace art HIDDEN {
class Barrier;
class Closure;
class RootInfo;

namespace mirror {
template<class MirrorType> class CompressedReference;
template<class MirrorType> class HeapReference;
class Object;
}  // namespace mirror

namespace gc {

namespace accounting {
template <size_t kAlignment> class SpaceBitmap;
using ContinuousSpaceBitmap = SpaceBitmap<kObjectAlignment>;
class HeapBitmap;
class ReadBarrierTable;
}  // namespace accounting

namespace space {
class RegionSpace;
}  // namespace space

namespace collector {

class ConcurrentCopying : public GarbageCollector {
 public:
  // Enable the no-from-space-refs verification at the pause.
  static constexpr bool kEnableNoFromSpaceRefsVerification = kIsDebugBuild;
  // Enable the from-space bytes/objects check.
  static constexpr bool kEnableFromSpaceAccountingCheck = kIsDebugBuild;
  // Enable verbose mode.
  static constexpr bool kVerboseMode = false;
  // If kGrayDirtyImmuneObjects is true then we gray dirty objects in the GC pause to prevent dirty
  // pages.
  static constexpr bool kGrayDirtyImmuneObjects = true;

  ConcurrentCopying(Heap* heap,
                    bool young_gen,
                    bool use_generational_cc,
                    const std::string& name_prefix = "",
                    bool measure_read_barrier_slow_path = false);
  ~ConcurrentCopying();

  void RunPhases() override
      REQUIRES(!immune_gray_stack_lock_,
               !mark_stack_lock_,
               !rb_slow_path_histogram_lock_,
               !skipped_blocks_lock_);
  void InitializePhase() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !immune_gray_stack_lock_);
  void MarkingPhase() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void CopyingPhase() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void ReclaimPhase() REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void FinishPhase() REQUIRES(!mark_stack_lock_,
                              !rb_slow_path_histogram_lock_,
                              !skipped_blocks_lock_);

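  // Roughly, RunPhases() drives a collection through InitializePhase(), a brief pause that
  // flips thread roots (see FlipThreadRoots() below), the concurrent MarkingPhase() (used by
  // the two-phase full-heap collection) and CopyingPhase(), and then ReclaimPhase() and
  // FinishPhase().
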
  void CaptureRssAtPeak() REQUIRES(!mark_stack_lock_);
  void BindBitmaps() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  GcType GetGcType() const override {
    return (use_generational_cc_ && young_gen_)
        ? kGcTypeSticky
        : kGcTypePartial;
  }
  CollectorType GetCollectorType() const override {
    return kCollectorTypeCC;
  }
  void RevokeAllThreadLocalBuffers() override;
  // Creates inter-region ref bitmaps for region-space and non-moving-space.
  // Gets called in Heap construction after the two spaces are created.
  void CreateInterRegionRefBitmaps();
  void SetRegionSpace(space::RegionSpace* region_space) {
    DCHECK(region_space != nullptr);
    region_space_ = region_space;
  }
  space::RegionSpace* RegionSpace() {
    return region_space_;
  }
  // Assert the to-space invariant for a heap reference `ref` held in `obj` at offset `offset`.
  void AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset, mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Assert the to-space invariant for a GC root reference `ref`.
  void AssertToSpaceInvariant(GcRootSource* gc_root_source, mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool IsInToSpace(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(ref != nullptr);
    return IsMarked(ref) == ref;
  }
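  // Loosely, the to-space invariant asserted above says that once thread roots have been
  // flipped, references visible to mutators must not point into evacuated from-space regions:
  // they must point to to-space copies, to unevacuated regions, or into non-moving/immune
  // spaces (see AssertToSpaceInvariantInNonMovingSpace() below).
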
  // Mark object `from_ref`, copying it to the to-space if needed.
  template<bool kGrayImmuneObject = true, bool kNoUnEvac = false, bool kFromGCThread = false>
  ALWAYS_INLINE mirror::Object* Mark(Thread* const self,
                                     mirror::Object* from_ref,
                                     mirror::Object* holder = nullptr,
                                     MemberOffset offset = MemberOffset(0))
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  ALWAYS_INLINE mirror::Object* MarkFromReadBarrier(mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  bool IsMarking() const {
    return is_marking_;
  }
  // We may want to use read barrier entrypoints before is_marking_ is true since concurrent
  // graying creates a small window where we might dispatch on these entrypoints.
  bool IsUsingReadBarrierEntrypoints() const {
    return is_using_read_barrier_entrypoints_;
  }
  bool IsActive() const {
    return is_active_;
  }
  Barrier& GetBarrier() {
    return *gc_barrier_;
  }
  bool IsWeakRefAccessEnabled() REQUIRES(Locks::thread_list_lock_) {
    return weak_ref_access_enabled_;
  }
  void RevokeThreadLocalMarkStack(Thread* thread) REQUIRES(!mark_stack_lock_);

  // Blindly return the forwarding pointer from the lockword, or null if there is none.
  static mirror::Object* GetFwdPtrUnchecked(mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // If marked, return the to-space object, otherwise null.
  mirror::Object* IsMarked(mirror::Object* from_ref) override
      REQUIRES_SHARED(Locks::mutator_lock_);

  void AssertNoThreadMarkStackMapping(Thread* thread) REQUIRES(!mark_stack_lock_);
  // Dump information about reference `ref` and return it as a string.
  // Use `ref_name` to name the reference in messages. Each message is prefixed with `indent`.
  std::string DumpReferenceInfo(mirror::Object* ref, const char* ref_name, const char* indent = "")
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  EXPORT void PushOntoMarkStack(Thread* const self, mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  // Returns a to-space copy of the from-space object from_ref, and atomically installs a
  // forwarding pointer. Ensures that the forwarding reference is visible to other threads before
  // the returned to-space pointer becomes visible to them.
  EXPORT mirror::Object* Copy(Thread* const self,
                              mirror::Object* from_ref,
                              mirror::Object* holder,
                              MemberOffset offset) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
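  // Illustrative sketch of the copy/forwarding-pointer race that Copy() resolves (not the
  // actual implementation; the helpers named here are hypothetical). The object contents are
  // copied before the forwarding pointer is published, and a lost CAS donates the reserved
  // block to the skipped-blocks pool:
  //
  //   mirror::Object* to_ref = AllocateToSpaceCopy(size);       // hypothetical
  //   CopyContents(from_ref, to_ref);                           // hypothetical
  //   if (!CasLockWordToForwardingAddress(from_ref, to_ref)) {  // hypothetical; lost the race
  //     RecordSkippedBlock(to_ref, size);  // later reused via skipped_blocks_map_
  //     return GetFwdPtr(from_ref);        // the winning thread's copy
  //   }
  //   return to_ref;
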
  // Scan the reference fields of object `to_ref`.
  template <bool kNoUnEvac>
  void Scan(mirror::Object* to_ref, size_t obj_size = 0) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  // Scan the reference fields of object `obj` in the dirty cards during
  // card-table scan. In addition to visiting the references, it also sets the
  // read-barrier state to gray for Reference-type objects to ensure that
  // GetReferent() called on these objects calls the read-barrier on the referent.
  template <bool kNoUnEvac>
  void ScanDirtyObject(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  // Process a field.
  template <bool kNoUnEvac>
  void Process(mirror::Object* obj, MemberOffset offset)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) override
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  template<bool kGrayImmuneObject>
  void MarkRoot(Thread* const self, mirror::CompressedReference<mirror::Object>* root)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
                  size_t count,
                  const RootInfo& info) override
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void VerifyNoFromSpaceReferences() REQUIRES(Locks::mutator_lock_);
  accounting::ObjectStack* GetAllocationStack();
  accounting::ObjectStack* GetLiveStack();
  void ProcessMarkStack() override REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  bool ProcessMarkStackOnce() REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void ProcessMarkStackRef(mirror::Object* to_ref) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void GrayAllDirtyImmuneObjects()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void GrayAllNewlyDirtyImmuneObjects()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void VerifyGrayImmuneObjects()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void VerifyNoMissingCardMarks()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  template <typename Processor>
  size_t ProcessThreadLocalMarkStacks(bool disable_weak_ref_access,
                                      Closure* checkpoint_callback,
                                      const Processor& processor)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void RevokeThreadLocalMarkStacks(bool disable_weak_ref_access, Closure* checkpoint_callback)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void SwitchToSharedMarkStackMode() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void SwitchToGcExclusiveMarkStackMode() REQUIRES_SHARED(Locks::mutator_lock_);
  void DelayReferenceReferent(ObjPtr<mirror::Class> klass,
                              ObjPtr<mirror::Reference> reference) override
      REQUIRES_SHARED(Locks::mutator_lock_);
  void ProcessReferences(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
  mirror::Object* MarkObject(mirror::Object* from_ref) override
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void MarkHeapReference(mirror::HeapReference<mirror::Object>* from_ref,
                         bool do_atomic_update) override
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  bool IsMarkedInUnevacFromSpace(mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool IsMarkedInNonMovingSpace(mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* field,
                                   bool do_atomic_update) override
      REQUIRES_SHARED(Locks::mutator_lock_);
  void SweepSystemWeaks(Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::heap_bitmap_lock_);
  // Sweep unmarked objects to complete the garbage collection. Full GCs sweep
  // all allocation spaces (except the region space). Sticky-bit GCs just sweep
  // a subset of the heap.
  void Sweep(bool swap_bitmaps)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_, !mark_stack_lock_);
  void SweepArray(accounting::ObjectStack* obj_arr, bool swap_bitmaps)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_, !mark_stack_lock_);
  void SweepLargeObjects(bool swap_bitmaps)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);
  void MarkZygoteLargeObjects()
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FillWithFakeObject(Thread* const self, mirror::Object* fake_obj, size_t byte_size)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  mirror::Object* AllocateInSkippedBlock(Thread* const self, size_t alloc_size)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void CheckEmptyMarkStack() REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void IssueEmptyCheckpoint() REQUIRES_SHARED(Locks::mutator_lock_);
  bool IsOnAllocStack(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_);
  // Return the forwarding pointer from the lockword. The argument must be in from space.
  mirror::Object* GetFwdPtr(mirror::Object* from_ref) REQUIRES_SHARED(Locks::mutator_lock_);
  void FlipThreadRoots() REQUIRES(!Locks::mutator_lock_);
  void SwapStacks() REQUIRES_SHARED(Locks::mutator_lock_);
  void RecordLiveStackFreezeSize(Thread* self);
  void ComputeUnevacFromSpaceLiveRatio();
  void LogFromSpaceRefHolder(mirror::Object* obj, MemberOffset offset)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Dump information about heap reference `ref`, referenced from object `obj` at offset `offset`,
  // and return it as a string.
  EXPORT std::string DumpHeapReference(mirror::Object* obj,
                                       MemberOffset offset,
                                       mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_);
  // Dump information about GC root `ref` and return it as a string.
  std::string DumpGcRoot(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_);
  void AssertToSpaceInvariantInNonMovingSpace(mirror::Object* obj, mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void ReenableWeakRefAccess(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
  void DisableMarking() REQUIRES_SHARED(Locks::mutator_lock_);
  void IssueDisableMarkingCheckpoint() REQUIRES_SHARED(Locks::mutator_lock_);
  void ExpandGcMarkStack() REQUIRES_SHARED(Locks::mutator_lock_);
  EXPORT mirror::Object* MarkNonMoving(Thread* const self,
                                       mirror::Object* from_ref,
                                       mirror::Object* holder = nullptr,
                                       MemberOffset offset = MemberOffset(0))
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_);
  ALWAYS_INLINE mirror::Object* MarkUnevacFromSpaceRegion(
      Thread* const self,
      mirror::Object* from_ref,
      accounting::SpaceBitmap<kObjectAlignment>* bitmap)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_);
  template<bool kGrayImmuneObject>
  ALWAYS_INLINE mirror::Object* MarkImmuneSpace(Thread* const self,
                                                mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!immune_gray_stack_lock_);
  void ScanImmuneObject(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  EXPORT mirror::Object* MarkFromReadBarrierWithMeasurements(Thread* const self,
                                                             mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void DumpPerformanceInfo(std::ostream& os) override REQUIRES(!rb_slow_path_histogram_lock_);
  // Set the read barrier mark entrypoints to non-null.
  void ActivateReadBarrierEntrypoints();

  void CaptureThreadRootsForMarking() REQUIRES_SHARED(Locks::mutator_lock_);
  void AddLiveBytesAndScanRef(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_);
  bool TestMarkBitmapForRef(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_);
  template <bool kAtomic = false>
  bool TestAndSetMarkBitForRef(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_);
  void PushOntoLocalMarkStack(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_);
  void ProcessMarkStackForMarkingAndComputeLiveBytes() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);

  void RemoveThreadMarkStackMapping(Thread* thread, accounting::ObjectStack* tl_mark_stack)
      REQUIRES(mark_stack_lock_);
  void AddThreadMarkStackMapping(Thread* thread, accounting::ObjectStack* tl_mark_stack)
      REQUIRES(mark_stack_lock_);
  void AssertEmptyThreadMarkStackMap() REQUIRES(mark_stack_lock_);

  space::RegionSpace* region_space_;  // The underlying region space.
  std::unique_ptr<Barrier> gc_barrier_;
  std::unique_ptr<accounting::ObjectStack> gc_mark_stack_;

  // If true, enable generational collection when using the Concurrent Copying
  // (CC) collector, i.e. use sticky-bit CC for minor collections and (full) CC
  // for major collections. Generational CC collection is currently only
  // compatible with Baker read barriers. Set in Heap constructor.
  const bool use_generational_cc_;

  // Generational "sticky", only trace through dirty objects in region space.
  const bool young_gen_;

  // If true, the GC thread is done scanning marked objects on dirty and aged
  // cards (see ConcurrentCopying::CopyingPhase).
  Atomic<bool> done_scanning_;

  // The read-barrier mark-bit stack. Stores object references whose
  // mark bit has been set by ConcurrentCopying::MarkFromReadBarrier,
  // so that this bit can be reset at the end of the collection in
  // ConcurrentCopying::FinishPhase. The mark bit of an object can be
  // used by mutator read barrier code to quickly test whether that
  // object has already been marked.
  std::unique_ptr<accounting::ObjectStack> rb_mark_bit_stack_;
  // Thread-unsafe Boolean value hinting that `rb_mark_bit_stack_` is
  // full. A thread-safe test of whether the read-barrier mark-bit
  // stack is full is implemented by `rb_mark_bit_stack_->AtomicPushBack(ref)`
  // (see use case in ConcurrentCopying::MarkFromReadBarrier).
  bool rb_mark_bit_stack_full_;

  // Guards access to pooled_mark_stacks_ and revoked_mark_stacks_ vectors.
  // Also guards destruction and revocations of thread-local mark-stacks.
  // Clearing thread-local mark-stack (by other threads or during destruction)
  // should be guarded by it.
  Mutex mark_stack_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::vector<accounting::ObjectStack*> revoked_mark_stacks_
      GUARDED_BY(mark_stack_lock_);
  // Size of thread local mark stack.
  static size_t GetMarkStackSize() {
    return gPageSize;
  }
  static constexpr size_t kMarkStackPoolSize = 256;
  std::vector<accounting::ObjectStack*> pooled_mark_stacks_
      GUARDED_BY(mark_stack_lock_);
  Thread* thread_running_gc_;
  bool is_marking_;  // True while marking is ongoing.
  // True while we might dispatch on the read barrier entrypoints.
  bool is_using_read_barrier_entrypoints_;
  bool is_active_;  // True while the collection is ongoing.
  bool is_asserting_to_space_invariant_;  // True while asserting the to-space invariant.
  ImmuneSpaces immune_spaces_;
  accounting::ContinuousSpaceBitmap* region_space_bitmap_;
  // A cache of Heap::GetMarkBitmap().
  accounting::HeapBitmap* heap_mark_bitmap_;
  size_t live_stack_freeze_size_;
  size_t from_space_num_bytes_at_first_pause_;  // Computed if kEnableFromSpaceAccountingCheck
  Atomic<int> is_mark_stack_push_disallowed_;   // Debug only.
  enum MarkStackMode {
    kMarkStackModeOff = 0,      // Mark stack is off.
    kMarkStackModeThreadLocal,  // All threads except for the GC-running thread push refs onto
                                // thread-local mark stacks. The GC-running thread pushes onto and
                                // pops off the GC mark stack without a lock.
    kMarkStackModeShared,       // All threads share the GC mark stack with a lock.
    kMarkStackModeGcExclusive   // The GC-running thread pushes onto and pops from the GC mark stack
                                // without a lock. Other threads won't access the mark stack.
  };
  // mark_stack_mode_ is updated asynchronously by the GC. We cannot assume that another thread
  // has seen it until it has run some kind of checkpoint. We generally access this using
  // acquire/release ordering, to ensure that any relevant prior changes are visible to readers of
  // the flag, and to ensure that CHECKs prior to a state change cannot be delayed past the state
  // change.
  Atomic<MarkStackMode> mark_stack_mode_;
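  // Within one collection, mark_stack_mode_ typically progresses kMarkStackModeOff ->
  // kMarkStackModeThreadLocal (during concurrent marking/copying) -> kMarkStackModeShared
  // (while thread-local stacks are revoked; see SwitchToSharedMarkStackMode()) ->
  // kMarkStackModeGcExclusive (final draining; see SwitchToGcExclusiveMarkStackMode()) ->
  // kMarkStackModeOff once marking is disabled.
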
  bool weak_ref_access_enabled_ GUARDED_BY(Locks::thread_list_lock_);

  // How many objects and bytes we moved. The GC thread moves many more objects
  // than mutators. Therefore, we separate the two to avoid CAS. `bytes_moved_` and
  // `bytes_moved_gc_thread_` are critical for GC triggering; the others are just informative.
  Atomic<size_t> bytes_moved_;    // Used by mutators
  Atomic<size_t> objects_moved_;  // Used by mutators

  // copied_live_bytes_ratio_sum_ is read and written by CC per GC, in
  // ReclaimPhase, and is read by DumpPerformanceInfo (potentially from another
  // thread). However, at present, DumpPerformanceInfo is only called when the
  // runtime shuts down, so there is no concurrent access. The same reasoning goes for
  // gc_count_ and reclaimed_bytes_ratio_sum_.

  // The sum of all copied live-bytes ratios (to_bytes/from_bytes).
  float copied_live_bytes_ratio_sum_;
  // The number of GCs counted, used to calculate the average above. (It doesn't
  // include GCs where from_bytes is zero, IOW, where the from-space is empty, which is
  // possible for a minor GC if all allocated objects are in the non-moving
  // space.)
  size_t gc_count_;
  // Bit is set if the corresponding object has inter-region references that
  // were found during the marking phase of a two-phase full-heap GC cycle.
  accounting::ContinuousSpaceBitmap region_space_inter_region_bitmap_;
  accounting::ContinuousSpaceBitmap non_moving_space_inter_region_bitmap_;

  // reclaimed_bytes_ratio = reclaimed_bytes/num_allocated_bytes per GC cycle
  float reclaimed_bytes_ratio_sum_;

  // Used only by the GC thread, so need not be atomic. Also, should be kept
  // in a different cacheline than bytes/objects_moved_ (above) to avoid false
  // cacheline sharing.
  size_t bytes_moved_gc_thread_;
  size_t objects_moved_gc_thread_;
  uint64_t bytes_scanned_;
  uint64_t cumulative_bytes_moved_;

  // The skipped blocks are memory blocks/chunks that held copies of
  // objects which went unused due to lost races (CAS failures) at
  // object copy/forward-pointer install. They may be reused.
  // Skipped blocks are always in region space. Their size is included directly
  // in num_bytes_allocated_, i.e. they are treated as allocated, but may be directly
  // used without going through a GC cycle like other objects. They are reused only
  // if we run out of region space. TODO: Revisit this design.
  Mutex skipped_blocks_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::multimap<size_t, uint8_t*> skipped_blocks_map_ GUARDED_BY(skipped_blocks_lock_);
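  // Entries in skipped_blocks_map_ are reused by AllocateInSkippedBlock() (declared above),
  // roughly by looking up a block of at least the requested size (a lower_bound-style search
  // keyed on size), with any leftover tail of a reused block refilled via FillWithFakeObject()
  // so the space stays walkable.
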
  Atomic<size_t> to_space_bytes_skipped_;
  Atomic<size_t> to_space_objects_skipped_;

  // If measure_read_barrier_slow_path_ is true, we count how much time is spent in
  // MarkFromReadBarrier and also log it.
  bool measure_read_barrier_slow_path_;
  // mark_from_read_barrier_measurements_ is true if systrace is enabled or
  // measure_read_barrier_slow_path_ is true.
  bool mark_from_read_barrier_measurements_;
  Atomic<uint64_t> rb_slow_path_ns_;
  Atomic<uint64_t> rb_slow_path_count_;
  Atomic<uint64_t> rb_slow_path_count_gc_;
  mutable Mutex rb_slow_path_histogram_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Histogram<uint64_t> rb_slow_path_time_histogram_ GUARDED_BY(rb_slow_path_histogram_lock_);
  uint64_t rb_slow_path_count_total_ GUARDED_BY(rb_slow_path_histogram_lock_);
  uint64_t rb_slow_path_count_gc_total_ GUARDED_BY(rb_slow_path_histogram_lock_);

  accounting::ReadBarrierTable* rb_table_;
  bool force_evacuate_all_;  // True if all regions are evacuated.
  Atomic<bool> updated_all_immune_objects_;
  bool gc_grays_immune_objects_;
  Mutex immune_gray_stack_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::vector<mirror::Object*> immune_gray_stack_ GUARDED_BY(immune_gray_stack_lock_);

  // Class of java.lang.Object. Filled in from WellKnownClasses in FlipCallback. Must
  // be filled in before flipping thread roots so that FillWithFakeObject can run. Not
  // ObjPtr since the GC may transition to suspended and runnable between phases.
  mirror::Class* java_lang_Object_;

  // Use signed because after_gc may be larger than before_gc.
  int64_t num_bytes_allocated_before_gc_;

  class ActivateReadBarrierEntrypointsCallback;
  class ActivateReadBarrierEntrypointsCheckpoint;
  class AssertToSpaceInvariantFieldVisitor;
  class AssertToSpaceInvariantRefsVisitor;
  class ClearBlackPtrsVisitor;
  class ComputeUnevacFromSpaceLiveRatioVisitor;
  class DisableMarkingCallback;
  class DisableMarkingCheckpoint;
  class DisableWeakRefAccessCallback;
  class FlipCallback;
  template <bool kConcurrent> class GrayImmuneObjectVisitor;
  class ImmuneSpaceScanObjVisitor;
  class LostCopyVisitor;
  template <bool kNoUnEvac> class RefFieldsVisitor;
  class RevokeThreadLocalMarkStackCheckpoint;
  class ScopedGcGraysImmuneObjects;
  class ThreadFlipVisitor;
  class VerifyGrayImmuneObjectsVisitor;
  class VerifyNoFromSpaceRefsFieldVisitor;
  class VerifyNoFromSpaceRefsVisitor;
  class VerifyNoMissingCardMarkVisitor;
  class ImmuneSpaceCaptureRefsVisitor;
  template <bool kAtomicTestAndSet = false> class CaptureRootsForMarkingVisitor;
  class CaptureThreadRootsForMarkingAndCheckpoint;
  template <bool kHandleInterRegionRefs> class ComputeLiveBytesAndMarkRefFieldsVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(ConcurrentCopying);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_