/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_
#define ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_

#include <memory>

#include "base/atomic.h"
#include "base/locks.h"
#include "base/macros.h"
#include "garbage_collector.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc_root.h"
#include "immune_spaces.h"
#include "mirror/object_reference.h"
#include "offsets.h"

namespace art HIDDEN {

class Thread;

namespace mirror {
class Class;
class Object;
}  // namespace mirror

namespace gc {

class Heap;

namespace space {
class ContinuousMemMapAllocSpace;
class ContinuousSpace;
}  // namespace space

namespace collector {

class SemiSpace : public GarbageCollector {
 public:
  // If true, use remembered sets in the generational mode.
  static constexpr bool kUseRememberedSet = true;

  explicit SemiSpace(Heap* heap, const std::string& name_prefix = "");

  ~SemiSpace() {}

  void RunPhases() override NO_THREAD_SAFETY_ANALYSIS;
  virtual void InitializePhase();
  virtual void MarkingPhase() REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  virtual void ReclaimPhase() REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  virtual void FinishPhase() REQUIRES(Locks::mutator_lock_);
  void MarkReachableObjects()
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  GcType GetGcType() const override {
    return kGcTypePartial;
  }

  CollectorType GetCollectorType() const override {
    return kCollectorTypeSS;
  }

  // Sets the space we will be copying objects to.
  void SetToSpace(space::ContinuousMemMapAllocSpace* to_space);

  // Sets the space we will be copying objects from.
  void SetFromSpace(space::ContinuousMemMapAllocSpace* from_space);

  // Sets whether or not we swap the semi spaces in the heap. This needs to be done with mutators
  // suspended.
  void SetSwapSemiSpaces(bool swap_semi_spaces) {
    swap_semi_spaces_ = swap_semi_spaces;
  }

  // Initializes internal structures.
  void Init();

  // Finds the default mark bitmap.
  void FindDefaultMarkBitmap();
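
  // A minimal sketch of how a caller is expected to drive this collector. This is
  // illustrative only: the real call site lives in Heap, and `bump_space` /
  // `temp_space` are hypothetical names for the two semi spaces.
  //
  //   SemiSpace collector(heap);
  //   collector.SetFromSpace(bump_space);  // Space whose objects get evacuated.
  //   collector.SetToSpace(temp_space);    // Destination for the survivors.
  //   collector.SetSwapSemiSpaces(true);   // Swap the two spaces when done.
  //   collector.Run(kGcCauseCollectorTransition, /*clear_soft_references=*/ false);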
  // Updates obj_ptr if the object has moved. Takes either an ObjectReference or a HeapReference.
  template<typename CompressedReferenceType>
  void MarkObject(CompressedReferenceType* obj_ptr)
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template<typename CompressedReferenceType>
  void MarkObjectIfNotInToSpace(CompressedReferenceType* obj_ptr)
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  mirror::Object* MarkObject(mirror::Object* root) override
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void MarkHeapReference(mirror::HeapReference<mirror::Object>* obj_ptr,
                         bool do_atomic_update) override
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void ScanObject(mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void VerifyNoFromSpaceReferences(mirror::Object* obj)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Binds the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Marks that portion of the heap as immune.
  virtual void BindBitmaps() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      REQUIRES(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self)
      REQUIRES(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_);

  void SweepSystemWeaks()
      REQUIRES_SHARED(Locks::heap_bitmap_lock_) REQUIRES(Locks::mutator_lock_);

  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) override
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
                  size_t count,
                  const RootInfo& info) override
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  virtual mirror::Object* MarkNonForwardedObject(mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> reference)
      override REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (the same
  // as the object itself for non-movable things).
  mirror::Object* IsMarked(mirror::Object* object) override
      REQUIRES(Locks::mutator_lock_)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_);

  bool IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* object,
                                   bool do_atomic_update) override
      REQUIRES(Locks::mutator_lock_)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_);

  // Marks a large object. Returns true if this call marked it, i.e. it was not already marked.
  bool MarkLargeObject(const mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
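
  // A sketch of the forwarding contract of MarkObject above (hypothetical visitor
  // code, not part of this class):
  //
  //   mirror::HeapReference<mirror::Object>* ref = ...;  // Field of a scanned object.
  //   semi_space->MarkObject(ref);  // If the referent is in from_space_, it is copied
  //                                 // to to_space_ (or fallback_space_) and ref is
  //                                 // rewritten to point at the forwarding address.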
  // Resizes the mark stack to new_size entries; used to expand it (typically to 2x its current
  // size) when it overflows.
  void ResizeMarkStack(size_t new_size) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if we should sweep the space.
  virtual bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  // Pushes an object onto the mark stack.
  void MarkStackPush(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_);

  void UpdateAndMarkModUnion()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack() override
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  inline mirror::Object* GetForwardingAddressInFromSpace(mirror::Object* obj) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Revokes all the thread-local buffers.
  void RevokeAllThreadLocalBuffers() override;

  // Stack of live objects that still need to be scanned.
  accounting::ObjectStack* mark_stack_;

  // Every object inside the immune spaces is assumed to be marked.
  ImmuneSpaces immune_spaces_;

  // Destination and source spaces (can be any type of ContinuousMemMapAllocSpace which either has
  // a live bitmap or doesn't).
  space::ContinuousMemMapAllocSpace* to_space_;
  // Cached live bitmap as an optimization.
  accounting::ContinuousSpaceBitmap* to_space_live_bitmap_;
  space::ContinuousMemMapAllocSpace* from_space_;
  // Cached mark bitmap as an optimization.
  accounting::HeapBitmap* mark_bitmap_;

  Thread* self_;

  // The space which we copy to if the to_space_ is full.
  space::ContinuousMemMapAllocSpace* fallback_space_;

  // How many objects and bytes we moved. Used so that we don't need to get the size of the
  // to_space_ when calculating how many objects and bytes we freed.
  size_t bytes_moved_;
  size_t objects_moved_;

  // How many bytes we avoided dirtying.
  size_t saved_bytes_;

  // The name of the collector.
  std::string collector_name_;

  // Used for the generational mode: the default interval between whole-heap collections. If this
  // is N, a whole-heap collection occurs every N collections.
  static constexpr int kDefaultWholeHeapCollectionInterval = 5;

  // Whether or not we swap the semi spaces in the heap during the marking phase.
  bool swap_semi_spaces_;

 private:
  class BitmapSetSlowPathVisitor;
  class MarkObjectVisitor;
  class VerifyNoFromSpaceReferencesVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(SemiSpace);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_