xref: /aosp_15_r20/art/runtime/class_table.h (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_CLASS_TABLE_H_
#define ART_RUNTIME_CLASS_TABLE_H_

#include <string>
#include <utility>
#include <vector>

#include "base/gc_visited_arena_pool.h"
#include "base/hash_set.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "obj_ptr.h"

namespace art HIDDEN {

class OatFile;

namespace linker {
class ImageWriter;
class OatWriter;
}  // namespace linker

namespace mirror {
class Class;
class ClassLoader;
class Object;
}  // namespace mirror

// Each class loader has a ClassTable.
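// In essence, this is a per-class-loader set of classes keyed by their descriptors, plus extra
// strong roots (such as dex files and dex caches) that must be kept live for that loader.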
class ClassTable {
 public:
  class TableSlot {
   public:
    TableSlot() : data_(0u) {}

    TableSlot(const TableSlot& copy) : data_(copy.data_.load(std::memory_order_relaxed)) {}

    explicit TableSlot(ObjPtr<mirror::Class> klass);

    TableSlot(ObjPtr<mirror::Class> klass, uint32_t descriptor_hash);
    TableSlot(uint32_t ptr, uint32_t descriptor_hash);

    TableSlot& operator=(const TableSlot& copy) {
      data_.store(copy.data_.load(std::memory_order_relaxed), std::memory_order_relaxed);
      return *this;
    }

    uint32_t Data() const {
      return data_.load(std::memory_order_relaxed);
    }

    bool IsNull() const REQUIRES_SHARED(Locks::mutator_lock_);

    uint32_t Hash() const {
      return MaskHash(data_.load(std::memory_order_relaxed));
    }

    uint32_t NonHashData() const {
      return RemoveHash(Data());
    }

    static uint32_t RemoveHash(uint32_t hash) {
      return hash & ~kHashMask;
    }

    static uint32_t MaskHash(uint32_t hash) {
      return hash & kHashMask;
    }

    bool MaskedHashEquals(uint32_t other) const {
      return MaskHash(other) == Hash();
    }

    template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
    ObjPtr<mirror::Class> Read() const REQUIRES_SHARED(Locks::mutator_lock_);

    // NO_THREAD_SAFETY_ANALYSIS since the visitor may require heap bitmap lock.
    template<typename Visitor>
    void VisitRoot(const Visitor& visitor) const NO_THREAD_SAFETY_ANALYSIS;

    template<typename Visitor>
    class ClassAndRootVisitor;

   private:
    // Extract the class pointer from the packed `data` word.
    static ObjPtr<mirror::Class> ExtractPtr(uint32_t data)
        REQUIRES_SHARED(Locks::mutator_lock_);

    static uint32_t Encode(ObjPtr<mirror::Class> klass, uint32_t hash_bits)
        REQUIRES_SHARED(Locks::mutator_lock_);

    // Data contains the class pointer GcRoot as well as the low bits of the descriptor hash.
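    // Classes are aligned to `kObjectAlignment`, so the low bits of the class pointer stored in
    // `data_` are always zero and can hold hash bits instead. For example, with an object
    // alignment of 8, `kHashMask` is 0x7: `Hash()` returns the low three bits and
    // `NonHashData()` returns the remaining (pointer) bits with the hash bits cleared.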
    mutable Atomic<uint32_t> data_;
    static constexpr uint32_t kHashMask = kObjectAlignment - 1;
  };

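  // A class descriptor and the pre-computed hash of that descriptor, usable as a lookup key
  // when no Class object is available.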
  using DescriptorHashPair = std::pair<std::string_view, uint32_t>;

  class ClassDescriptorHash {
   public:
    // uint32_t for cross compilation.
    // NO_THREAD_SAFETY_ANALYSIS: Used from unannotated `HashSet<>` functions.
    uint32_t operator()(const TableSlot& slot) const NO_THREAD_SAFETY_ANALYSIS;
    // uint32_t for cross compilation.
    uint32_t operator()(const DescriptorHashPair& pair) const;
  };

  class ClassDescriptorEquals {
   public:
    // Same class loader and descriptor.
    bool operator()(const TableSlot& a, const TableSlot& b) const
        NO_THREAD_SAFETY_ANALYSIS;
    // Same descriptor.
    bool operator()(const TableSlot& a, const DescriptorHashPair& b) const
        NO_THREAD_SAFETY_ANALYSIS;
  };

  class TableSlotEmptyFn {
   public:
    void MakeEmpty(TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
      item = TableSlot();
      DCHECK(IsEmpty(item));
    }
    bool IsEmpty(const TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
      return item.IsNull();
    }
  };

  // Hash set that hashes class descriptor, and compares descriptors and class loaders. Results
  // should be compared for a matching class descriptor and class loader.
  using ClassSet = HashSet<TableSlot,
                           TableSlotEmptyFn,
                           ClassDescriptorHash,
                           ClassDescriptorEquals,
                           GcRootArenaAllocator<TableSlot, kAllocatorTagClassTable>>;

  EXPORT ClassTable();

  // Freeze the current class tables by allocating a new table and never updating or modifying the
  // existing table. This helps prevent dirty pages caused by inserting after the zygote fork.
  void FreezeSnapshot()
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in previous snapshots defined by `defining_loader`.
  size_t NumZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in the latest snapshot defined by `defining_loader`.
  size_t NumNonZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in previous snapshots regardless of the defining loader.
  EXPORT size_t NumReferencedZygoteClasses() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in the latest snapshot regardless of the defining loader.
  size_t NumReferencedNonZygoteClasses() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of class-sets in the class table.
  size_t Size() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update a class in the table with the new class. Returns the existing class which was replaced.
  ObjPtr<mirror::Class> UpdateClass(ObjPtr<mirror::Class> new_klass, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS for object marking requiring heap bitmap lock.
  template <class Visitor>
  void VisitRoots(Visitor& visitor, bool skip_classes = false) NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  template <class Visitor>
  void VisitRoots(const Visitor& visitor, bool skip_classes = false) NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class Visitor>
  void VisitClassesAndRoots(Visitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit classes in those class-sets which satisfy 'cond'.
  template <class Condition, class Visitor>
  void VisitClassesIfConditionMet(Condition& cond, Visitor& visitor) REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Stops visiting if the visitor returns false.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  bool Visit(Visitor& visitor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  bool Visit(const Visitor& visitor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the first class that matches the descriptor. Returns null if there are none.
  ObjPtr<mirror::Class> Lookup(std::string_view descriptor, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the first class that matches the descriptor of klass. Returns null if there are none.
  // Used for tests and debug-build checks.
  ObjPtr<mirror::Class> LookupByDescriptor(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Insert(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InsertWithHash(ObjPtr<mirror::Class> klass, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the strong root, false if it already exists.
  bool InsertStrongRoot(ObjPtr<mirror::Object> obj)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the oat file, false if it already exists.
  bool InsertOatFile(const OatFile* oat_file)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read a table from ptr and put it at the front of the class set.
  EXPORT size_t ReadFromMemory(uint8_t* ptr)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Add a class set to the front of classes.
  void AddClassSet(ClassSet&& set)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear strong roots (other than classes themselves).
  void ClearStrongRoots()
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Filter strong roots (other than classes themselves).
  template <typename Filter>
  void RemoveStrongRoots(const Filter& filter)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ReaderWriterMutex& GetLock() {
    return lock_;
  }

 private:
  size_t CountDefiningLoaderClasses(ObjPtr<mirror::ClassLoader> defining_loader,
                                    const ClassSet& set) const
      REQUIRES(lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the oat file, false if it already exists.
  bool InsertOatFileLocked(const OatFile* oat_file)
      REQUIRES(lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lock to guard inserting and removing.
  mutable ReaderWriterMutex lock_;
  // We keep a vector of class sets to help prevent dirty pages after the zygote fork by calling
  // FreezeSnapshot.
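  // Lookups consult every set in the vector, while new classes are inserted into the most recent
  // set, so the frozen (earlier) sets are never written to again.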
  std::vector<ClassSet> classes_ GUARDED_BY(lock_);
  // Extra strong roots that can be either dex files or dex caches. Dex files used by the class
  // loader which may not be owned by the class loader must be held strongly live. Also, dex caches
  // are held live to prevent them from being unloaded once they have classes in them.
  std::vector<GcRoot<mirror::Object>> strong_roots_ GUARDED_BY(lock_);
  // Keep track of oat files with GC roots associated with dex caches in `strong_roots_`.
  std::vector<const OatFile*> oat_files_ GUARDED_BY(lock_);

  friend class linker::ImageWriter;  // for InsertWithoutLocks.
};

}  // namespace art

#endif  // ART_RUNTIME_CLASS_TABLE_H_