xref: /aosp_15_r20/art/runtime/class_linker.cc (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
1 /*
2  * Copyright (C) 2011 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "class_linker.h"
18 
19 #include <unistd.h>
20 
21 #include <algorithm>
22 #include <deque>
23 #include <forward_list>
24 #include <iostream>
25 #include <iterator>
26 #include <map>
27 #include <memory>
28 #include <queue>
29 #include <string>
30 #include <string_view>
31 #include <tuple>
32 #include <utility>
33 #include <vector>
34 
35 #include "android-base/macros.h"
36 #include "android-base/stringprintf.h"
37 #include "android-base/strings.h"
38 #include "art_field-inl.h"
39 #include "art_method-inl.h"
40 #include "barrier.h"
41 #include "base/arena_allocator.h"
42 #include "base/arena_bit_vector.h"
43 #include "base/casts.h"
44 #include "base/file_utils.h"
45 #include "base/hash_map.h"
46 #include "base/hash_set.h"
47 #include "base/leb128.h"
48 #include "base/logging.h"
49 #include "base/mem_map_arena_pool.h"
50 #include "base/membarrier.h"
51 #include "base/metrics/metrics.h"
52 #include "base/mutex-inl.h"
53 #include "base/os.h"
54 #include "base/pointer_size.h"
55 #include "base/quasi_atomic.h"
56 #include "base/scoped_arena_containers.h"
57 #include "base/scoped_flock.h"
58 #include "base/stl_util.h"
59 #include "base/systrace.h"
60 #include "base/time_utils.h"
61 #include "base/unix_file/fd_file.h"
62 #include "base/utils.h"
63 #include "base/value_object.h"
64 #include "cha.h"
65 #include "class_linker-inl.h"
66 #include "class_loader_utils.h"
67 #include "class_root-inl.h"
68 #include "class_table-inl.h"
69 #include "common_throws.h"
70 #include "compiler_callbacks.h"
71 #include "debug_print.h"
72 #include "debugger.h"
73 #include "dex/class_accessor-inl.h"
74 #include "dex/descriptors_names.h"
75 #include "dex/dex_file-inl.h"
76 #include "dex/dex_file.h"
77 #include "dex/dex_file_annotations.h"
78 #include "dex/dex_file_exception_helpers.h"
79 #include "dex/dex_file_loader.h"
80 #include "dex/modifiers.h"
81 #include "dex/signature-inl.h"
82 #include "dex/utf.h"
83 #include "entrypoints/entrypoint_utils-inl.h"
84 #include "entrypoints/runtime_asm_entrypoints.h"
85 #include "experimental_flags.h"
86 #include "gc/accounting/card_table-inl.h"
87 #include "gc/accounting/heap_bitmap-inl.h"
88 #include "gc/accounting/space_bitmap-inl.h"
89 #include "gc/heap-visit-objects-inl.h"
90 #include "gc/heap.h"
91 #include "gc/scoped_gc_critical_section.h"
92 #include "gc/space/image_space.h"
93 #include "gc/space/space-inl.h"
94 #include "gc_root-inl.h"
95 #include "handle_scope-inl.h"
96 #include "hidden_api.h"
97 #include "imt_conflict_table.h"
98 #include "imtable-inl.h"
99 #include "intern_table-inl.h"
100 #include "interpreter/interpreter.h"
101 #include "interpreter/mterp/nterp.h"
102 #include "jit/debugger_interface.h"
103 #include "jit/jit.h"
104 #include "jit/jit_code_cache.h"
105 #include "jni/java_vm_ext.h"
106 #include "jni/jni_internal.h"
107 #include "linear_alloc-inl.h"
108 #include "mirror/array-alloc-inl.h"
109 #include "mirror/array-inl.h"
110 #include "mirror/call_site.h"
111 #include "mirror/class-alloc-inl.h"
112 #include "mirror/class-inl.h"
113 #include "mirror/class.h"
114 #include "mirror/class_ext.h"
115 #include "mirror/class_loader.h"
116 #include "mirror/dex_cache-inl.h"
117 #include "mirror/dex_cache.h"
118 #include "mirror/emulated_stack_frame.h"
119 #include "mirror/field.h"
120 #include "mirror/iftable-inl.h"
121 #include "mirror/method.h"
122 #include "mirror/method_handle_impl.h"
123 #include "mirror/method_handles_lookup.h"
124 #include "mirror/method_type-inl.h"
125 #include "mirror/object-inl.h"
126 #include "mirror/object-refvisitor-inl.h"
127 #include "mirror/object.h"
128 #include "mirror/object_array-alloc-inl.h"
129 #include "mirror/object_array-inl.h"
130 #include "mirror/object_array.h"
131 #include "mirror/object_reference-inl.h"
132 #include "mirror/object_reference.h"
133 #include "mirror/proxy.h"
134 #include "mirror/reference-inl.h"
135 #include "mirror/stack_trace_element.h"
136 #include "mirror/string-inl.h"
137 #include "mirror/throwable.h"
138 #include "mirror/var_handle.h"
139 #include "native/dalvik_system_DexFile.h"
140 #include "nativehelper/scoped_local_ref.h"
141 #include "nterp_helpers-inl.h"
142 #include "nterp_helpers.h"
143 #include "oat/image-inl.h"
144 #include "oat/jni_stub_hash_map-inl.h"
145 #include "oat/oat.h"
146 #include "oat/oat_file-inl.h"
147 #include "oat/oat_file.h"
148 #include "oat/oat_file_assistant.h"
149 #include "oat/oat_file_manager.h"
150 #include "object_lock.h"
151 #include "profile/profile_compilation_info.h"
152 #include "runtime.h"
153 #include "runtime_callbacks.h"
154 #include "scoped_assert_no_transaction_checks.h"
155 #include "scoped_thread_state_change-inl.h"
156 #include "startup_completed_task.h"
157 #include "thread-inl.h"
158 #include "thread.h"
159 #include "thread_list.h"
160 #include "trace.h"
161 #include "vdex_file.h"
162 #include "verifier/class_verifier.h"
163 #include "verifier/verifier_deps.h"
164 #include "well_known_classes.h"
165 
166 namespace art HIDDEN {
167 
168 using android::base::StringPrintf;
169 
170 static constexpr bool kCheckImageObjects = kIsDebugBuild;
171 static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;
172 
173 static void ThrowNoClassDefFoundError(const char* fmt, ...)
174     __attribute__((__format__(__printf__, 1, 2)))
175     REQUIRES_SHARED(Locks::mutator_lock_);
ThrowNoClassDefFoundError(const char * fmt,...)176 static void ThrowNoClassDefFoundError(const char* fmt, ...) {
177   va_list args;
178   va_start(args, fmt);
179   Thread* self = Thread::Current();
180   self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
181   va_end(args);
182 }
183 
GetErroneousStateError(ObjPtr<mirror::Class> c)184 static ObjPtr<mirror::Object> GetErroneousStateError(ObjPtr<mirror::Class> c)
185     REQUIRES_SHARED(Locks::mutator_lock_) {
186   ObjPtr<mirror::ClassExt> ext(c->GetExtData());
187   if (ext == nullptr) {
188     return nullptr;
189   } else {
190     return ext->GetErroneousStateError();
191   }
192 }
193 
IsVerifyError(ObjPtr<mirror::Object> obj)194 static bool IsVerifyError(ObjPtr<mirror::Object> obj)
195     REQUIRES_SHARED(Locks::mutator_lock_) {
196   // This is slow, but we only use it for rethrowing an error, and for DCHECK.
197   return obj->GetClass()->DescriptorEquals("Ljava/lang/VerifyError;");
198 }
199 
200 // Helper for ThrowEarlierClassFailure. Throws the stored error.
HandleEarlierErroneousStateError(Thread * self,ClassLinker * class_linker,ObjPtr<mirror::Class> c)201 static void HandleEarlierErroneousStateError(Thread* self,
202                                              ClassLinker* class_linker,
203                                              ObjPtr<mirror::Class> c)
204     REQUIRES_SHARED(Locks::mutator_lock_) {
205   ObjPtr<mirror::Object> obj = GetErroneousStateError(c);
206   DCHECK(obj != nullptr);
207   self->AssertNoPendingException();
208   DCHECK(!obj->IsClass());
209   ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
210   ObjPtr<mirror::Class> error_class = obj->GetClass();
211   CHECK(throwable_class->IsAssignableFrom(error_class));
212   self->SetException(obj->AsThrowable());
213   self->AssertPendingException();
214 }
215 
UpdateClassAfterVerification(Handle<mirror::Class> klass,PointerSize pointer_size,verifier::FailureKind failure_kind)216 static void UpdateClassAfterVerification(Handle<mirror::Class> klass,
217                                          PointerSize pointer_size,
218                                          verifier::FailureKind failure_kind)
219     REQUIRES_SHARED(Locks::mutator_lock_) {
220   Runtime* runtime = Runtime::Current();
221   ClassLinker* class_linker = runtime->GetClassLinker();
222   if (klass->IsVerified() && (failure_kind == verifier::FailureKind::kNoFailure)) {
223     klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
224   }
225 
226   // Now that the class has passed verification, try to set nterp entrypoints
227   // to methods that currently use the switch interpreter.
228   if (interpreter::CanRuntimeUseNterp()) {
229     for (ArtMethod& m : klass->GetMethods(pointer_size)) {
230       if (class_linker->IsQuickToInterpreterBridge(m.GetEntryPointFromQuickCompiledCode())) {
231         runtime->GetInstrumentation()->InitializeMethodsCode(&m, /*aot_code=*/nullptr);
232       }
233     }
234   }
235 }
236 
// Callback responsible for making a batch of classes visibly initialized after ensuring
// visibility for all threads, either by using `membarrier()` or by running a checkpoint.
class ClassLinker::VisiblyInitializedCallback final
    : public Closure, public IntrusiveForwardListNode<VisiblyInitializedCallback> {
 public:
  explicit VisiblyInitializedCallback(ClassLinker* class_linker)
      : class_linker_(class_linker),
        num_classes_(0u),
        thread_visibility_counter_(0),
        barriers_() {
    std::fill_n(classes_, kMaxClasses, nullptr);
  }

  // Returns true if no classes have been added to this batch.
  bool IsEmpty() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == 0u;
  }

  // Returns true if the batch holds `kMaxClasses` entries and cannot accept more.
  bool IsFull() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == kMaxClasses;
  }

  // Adds an initialized class to the batch, recording it as a JNI weak global reference.
  void AddClass(Thread* self, ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kInitialized);
    DCHECK(!IsFull());
    classes_[num_classes_] = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, klass);
    ++num_classes_;
  }

  // Registers a barrier that shall be `Pass()`ed when this callback completes.
  void AddBarrier(Barrier* barrier) {
    barriers_.push_front(barrier);
  }

  // Takes ownership of all registered barriers, returned in insertion order.
  std::forward_list<Barrier*> GetAndClearBarriers() {
    std::forward_list<Barrier*> result;
    result.swap(barriers_);
    result.reverse();  // Return barriers in insertion order.
    return result;
  }

  // Ensures cross-thread visibility of the batched classes' initialized status and
  // then marks them visibly initialized: uses `membarrier()` when the register
  // command succeeded at startup, otherwise runs a checkpoint on all threads.
  void MakeVisible(Thread* self) {
    if (class_linker_->visibly_initialize_classes_with_membarier_) {
      // If the associated register command succeeded, this command should never fail.
      int membarrier_result = art::membarrier(MembarrierCommand::kPrivateExpedited);
      CHECK_EQ(membarrier_result, 0) << strerror(errno);
      MarkVisiblyInitialized(self);
    } else {
      DCHECK_EQ(thread_visibility_counter_.load(std::memory_order_relaxed), 0);
      size_t count = Runtime::Current()->GetThreadList()->RunCheckpoint(this);
      AdjustThreadVisibilityCounter(self, count);
    }
  }

  // Checkpoint `Closure` entry point; executed once per checkpointed thread.
  void Run(Thread* self) override {
    AdjustThreadVisibilityCounter(self, -1);
  }

 private:
  // Adds `adjustment` to the visibility counter; the adjuster that brings the
  // counter to zero performs the final marking. See `thread_visibility_counter_`.
  void AdjustThreadVisibilityCounter(Thread* self, ssize_t adjustment) {
    ssize_t old = thread_visibility_counter_.fetch_add(adjustment, std::memory_order_relaxed);
    if (old + adjustment == 0) {
      // All threads passed the checkpoint. Mark classes as visibly initialized.
      MarkVisiblyInitialized(self);
    }
  }

  // Marks every batched class as `kVisiblyInitialized`, fixes up its static
  // trampolines, releases the weak global references, and returns this callback
  // to the ClassLinker for reuse or deletion.
  void MarkVisiblyInitialized(Thread* self) {
    {
      ScopedObjectAccess soa(self);
      StackHandleScope<1u> hs(self);
      MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
      JavaVMExt* vm = self->GetJniEnv()->GetVm();
      for (size_t i = 0, num = num_classes_; i != num; ++i) {
        klass.Assign(ObjPtr<mirror::Class>::DownCast(self->DecodeJObject(classes_[i])));
        vm->DeleteWeakGlobalRef(self, classes_[i]);
        // A null decoded reference means the weak global ref no longer resolves
        // to a class; skip it.
        if (klass != nullptr) {
          mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
          class_linker_->FixupStaticTrampolines(self, klass.Get());
        }
      }
      num_classes_ = 0u;
    }
    class_linker_->VisiblyInitializedCallbackDone(self, this);
  }

  // Making classes initialized in bigger batches helps with app startup for apps
  // that initialize a lot of classes by running fewer synchronization functions.
  // (On the other hand, bigger batches make class initialization checks more
  // likely to take a slow path but that is mitigated by making partially
  // filled buffers visibly initialized if we take the slow path many times.
  // See `Thread::kMakeVisiblyInitializedCounterTriggerCount`.)
  static constexpr size_t kMaxClasses = 48;

  ClassLinker* const class_linker_;
  // Number of valid entries in `classes_`.
  size_t num_classes_;
  // Weak global references to the batched classes.
  jweak classes_[kMaxClasses];

  // The thread visibility counter starts at 0 and it is incremented by the number of
  // threads that need to run this callback (by the thread that requests the callback
  // to be run) and decremented once for each `Run()` execution. When it reaches 0,
  // whether after the increment or after a decrement, we know that `Run()` was executed
  // for all threads and therefore we can mark the classes as visibly initialized.
  // Used only if the preferred `membarrier()` command is unsupported.
  std::atomic<ssize_t> thread_visibility_counter_;

  // List of barriers to `Pass()` for threads that wait for the callback to complete.
  std::forward_list<Barrier*> barriers_;
};
346 
// Flushes the current batch of initialized classes to the visibly-initialized
// state. If `wait` is true, blocks (with the mutator lock released) until all
// currently running visibility callbacks have completed.
void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wait) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    return;  // Nothing to do. Thanks to the x86 memory model classes skip the initialized status.
  }
  std::optional<Barrier> maybe_barrier;  // Avoid constructing the Barrier for `wait == false`.
  if (wait) {
    Locks::mutator_lock_->AssertNotHeld(self);
    maybe_barrier.emplace(0);
  }
  int wait_count = 0;
  VisiblyInitializedCallback* callback = nullptr;
  {
    MutexLock lock(self, visibly_initialized_callback_lock_);
    // Release the pending (possibly partially filled) batch so it can run below.
    if (visibly_initialized_callback_ != nullptr && !visibly_initialized_callback_->IsEmpty()) {
      callback = visibly_initialized_callback_.release();
      running_visibly_initialized_callbacks_.push_front(*callback);
    }
    if (wait) {
      // Register our barrier with every running callback; each shall `Pass()` it on completion.
      DCHECK(maybe_barrier.has_value());
      Barrier* barrier = std::addressof(*maybe_barrier);
      for (VisiblyInitializedCallback& cb : running_visibly_initialized_callbacks_) {
        cb.AddBarrier(barrier);
        ++wait_count;
      }
    }
  }
  // Run the released callback outside the lock.
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  // Wait until every callback we registered with has passed the barrier.
  if (wait_count != 0) {
    DCHECK(maybe_barrier.has_value());
    maybe_barrier->Increment(self, wait_count);
  }
}
381 
// Completion hook for a `VisiblyInitializedCallback`: passes waiters' barriers,
// unlinks the callback from the running list, and either keeps it for reuse as
// the next pending batch or deletes it.
void ClassLinker::VisiblyInitializedCallbackDone(Thread* self,
                                                 VisiblyInitializedCallback* callback) {
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Pass the barriers if requested.
  for (Barrier* barrier : callback->GetAndClearBarriers()) {
    barrier->Pass(self);
  }
  // Remove the callback from the list of running callbacks.
  // Linear search over the intrusive forward list; `callback` must be present,
  // hence the DCHECKs that we never run off the end.
  auto before = running_visibly_initialized_callbacks_.before_begin();
  auto it = running_visibly_initialized_callbacks_.begin();
  DCHECK(it != running_visibly_initialized_callbacks_.end());
  while (std::addressof(*it) != callback) {
    before = it;
    ++it;
    DCHECK(it != running_visibly_initialized_callbacks_.end());
  }
  running_visibly_initialized_callbacks_.erase_after(before);
  // Reuse or destroy the callback object.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(callback);
  } else {
    delete callback;
  }
}
406 
ForceClassInitialized(Thread * self,Handle<mirror::Class> klass)407 void ClassLinker::ForceClassInitialized(Thread* self, Handle<mirror::Class> klass) {
408   ClassLinker::VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
409   if (cb != nullptr) {
410     cb->MakeVisible(self);
411   }
412   ScopedThreadSuspension sts(self, ThreadState::kSuspended);
413   MakeInitializedClassesVisiblyInitialized(self, /*wait=*/true);
414 }
415 
FindBootJniStub(ArtMethod * method)416 const void* ClassLinker::FindBootJniStub(ArtMethod* method) {
417   return FindBootJniStub(JniStubKey(method));
418 }
419 
FindBootJniStub(uint32_t flags,std::string_view shorty)420 const void* ClassLinker::FindBootJniStub(uint32_t flags, std::string_view shorty) {
421   return FindBootJniStub(JniStubKey(flags, shorty));
422 }
423 
FindBootJniStub(JniStubKey key)424 const void* ClassLinker::FindBootJniStub(JniStubKey key) {
425   auto it = boot_image_jni_stubs_.find(key);
426   if (it == boot_image_jni_stubs_.end()) {
427     return nullptr;
428   } else {
429     return it->second;
430   }
431 }
432 
// Marks `klass` as initialized and batches it for the visibly-initialized
// transition. Returns a full callback batch that the caller must run via
// `MakeVisible()`, or null when no callback needs running.
ClassLinker::VisiblyInitializedCallback* ClassLinker::MarkClassInitialized(
    Thread* self, Handle<mirror::Class> klass) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // Thanks to the x86 memory model, we do not need any memory fences and
    // we can immediately mark the class as visibly initialized.
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  if (Runtime::Current()->IsActiveTransaction()) {
    // Transactions are single-threaded, so we can mark the class as visibly initialized.
    // (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Lazily (re)create the pending batch.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(new VisiblyInitializedCallback(this));
  }
  DCHECK(!visibly_initialized_callback_->IsFull());
  visibly_initialized_callback_->AddClass(self, klass.Get());

  if (visibly_initialized_callback_->IsFull()) {
    // The batch is full: move it to the running list and hand it to the caller to run.
    VisiblyInitializedCallback* callback = visibly_initialized_callback_.release();
    running_visibly_initialized_callbacks_.push_front(*callback);
    return callback;
  } else {
    return nullptr;
  }
}
465 
// Registers `native_method` as the JNI implementation of `method` and returns
// the code that was actually installed (runtime callbacks may substitute a
// different pointer via `new_native_method`).
const void* ClassLinker::RegisterNative(
    Thread* self, ArtMethod* method, const void* native_method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  CHECK(native_method != nullptr) << method->PrettyMethod();
  void* new_native_method = nullptr;
  Runtime* runtime = Runtime::Current();
  // Let runtime callbacks observe and possibly replace the native code pointer.
  runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
                                                       native_method,
                                                       /*out*/&new_native_method);
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Remove old registered method if any.
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    // To ensure correct memory visibility, we need the class to be visibly
    // initialized before we can set the JNI entrypoint.
    if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
      method->SetEntryPointFromJni(new_native_method);
    } else {
      // Defer installing the entrypoint until the class is visibly initialized.
      critical_native_code_with_clinit_check_.emplace(method, new_native_method);
    }
  } else {
    method->SetEntryPointFromJni(new_native_method);
  }
  return new_native_method;
}
494 
UnregisterNative(Thread * self,ArtMethod * method)495 void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
496   CHECK(method->IsNative()) << method->PrettyMethod();
497   // Restore stub to lookup native pointer via dlsym.
498   if (method->IsCriticalNative()) {
499     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
500     auto it = critical_native_code_with_clinit_check_.find(method);
501     if (it != critical_native_code_with_clinit_check_.end()) {
502       critical_native_code_with_clinit_check_.erase(it);
503     }
504     method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
505   } else {
506     method->SetEntryPointFromJni(GetJniDlsymLookupStub());
507   }
508 }
509 
GetRegisteredNative(Thread * self,ArtMethod * method)510 const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
511   if (method->IsCriticalNative()) {
512     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
513     auto it = critical_native_code_with_clinit_check_.find(method);
514     if (it != critical_native_code_with_clinit_check_.end()) {
515       return it->second;
516     }
517     const void* native_code = method->GetEntryPointFromJni();
518     return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
519   } else {
520     const void* native_code = method->GetEntryPointFromJni();
521     return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
522   }
523 }
524 
// Throws the appropriate exception for a class that already failed a previous
// initialization or verification attempt: either the stored error (rethrown)
// and/or a NoClassDefFoundError wrapping it, per the JLS rules cited below.
void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
                                           bool wrap_in_no_class_def,
                                           bool log) {
  // The class failed to initialize on a previous attempt, so we want to throw
  // a NoClassDefFoundError (v2 2.17.5).  The exception to this rule is if we
  // failed in verification, in which case v2 5.4.1 says we need to re-throw
  // the previous error.
  Runtime* const runtime = Runtime::Current();
  if (!runtime->IsAotCompiler()) {  // Give info if this occurs at runtime.
    std::string extra;
    ObjPtr<mirror::Object> verify_error = GetErroneousStateError(c);
    if (verify_error != nullptr) {
      DCHECK(!verify_error->IsClass());
      extra = verify_error->AsThrowable()->Dump();
    }
    if (log) {
      LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
                << ": " << extra;
    }
  }

  CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
  Thread* self = Thread::Current();
  if (runtime->IsAotCompiler()) {
    // At compile time, accurate errors and NCDFE are disabled to speed compilation.
    ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
  } else {
    ObjPtr<mirror::Object> erroneous_state_error = GetErroneousStateError(c);
    if (erroneous_state_error != nullptr) {
      // Rethrow stored error.
      HandleEarlierErroneousStateError(self, this, c);
    }
    // TODO This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
    // might have meant to go down the earlier if statement with the original error but it got
    // swallowed by the OOM so we end up here.
    if (erroneous_state_error == nullptr ||
        (wrap_in_no_class_def && !IsVerifyError(erroneous_state_error))) {
      // If there isn't a recorded earlier error, or this is a repeat throw from initialization,
      // the top-level exception must be a NoClassDefFoundError. The potentially already pending
      // exception will be a cause.
      self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
                                     c->PrettyDescriptor().c_str());
    }
  }
}
571 
VlogClassInitializationFailure(Handle<mirror::Class> klass)572 static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
573     REQUIRES_SHARED(Locks::mutator_lock_) {
574   if (VLOG_IS_ON(class_linker)) {
575     std::string temp;
576     LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
577               << klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
578   }
579 }
580 
// Wraps the current thread's pending exception in an ExceptionInInitializerError,
// unless the pending exception is already an Error (which may be used as-is).
// Also sanity-checks that boot classpath classes are not failing initialization.
static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread* self = Thread::Current();

  ObjPtr<mirror::Throwable> cause = self->GetException();
  CHECK(cause != nullptr);

  // Boot classpath classes should not fail initialization. This is a consistency debug check.
  // This cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
  if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
    std::string tmp;
    // We want to LOG(FATAL) on debug builds since this really shouldn't be happening but we need to
    // make sure to only do it if we don't have AsyncExceptions being thrown around since those
    // could have caused the error.
    bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
    LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
                                            << " failed initialization: "
                                            << self->GetException()->Dump();
  }

  // We only wrap non-Error exceptions; an Error can just be used as-is.
  if (!cause->IsError()) {
    self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
  }
  VlogClassInitializationFailure(klass);
}
607 
RegisterMemBarrierForClassInitialization()608 static bool RegisterMemBarrierForClassInitialization() {
609   if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
610     // Thanks to the x86 memory model, classes skip the initialized status, so there is no need
611     // to use `membarrier()` or other synchronization for marking classes visibly initialized.
612     return false;
613   }
614   int membarrier_result = art::membarrier(MembarrierCommand::kRegisterPrivateExpedited);
615   return membarrier_result == 0;
616 }
617 
// Constructs the ClassLinker. Trampolines and class roots are filled in later
// (all pointer members start null); the array-class cache is cleared here.
ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
    : boot_class_table_(new ClassTable()),
      failed_dex_cache_class_lookups_(0),
      class_roots_(nullptr),
      find_array_class_cache_next_victim_(0),
      init_done_(false),
      log_new_roots_(false),
      intern_table_(intern_table),
      fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
      jni_dlsym_lookup_trampoline_(nullptr),
      jni_dlsym_lookup_critical_trampoline_(nullptr),
      quick_resolution_trampoline_(nullptr),
      quick_imt_conflict_trampoline_(nullptr),
      quick_generic_jni_trampoline_(nullptr),
      quick_to_interpreter_bridge_trampoline_(nullptr),
      nterp_trampoline_(nullptr),
      image_pointer_size_(kRuntimePointerSize),
      visibly_initialized_callback_lock_("visibly initialized callback lock"),
      visibly_initialized_callback_(nullptr),
      running_visibly_initialized_callbacks_(),
      visibly_initialize_classes_with_membarier_(RegisterMemBarrierForClassInitialization()),
      critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
      critical_native_code_with_clinit_check_(),
      boot_image_jni_stubs_(JniStubKeyHash(Runtime::Current()->GetInstructionSet()),
                            JniStubKeyEquals(Runtime::Current()->GetInstructionSet())),
      cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
  // For CHA disabled during Aot, see b/34193647.

  CHECK(intern_table_ != nullptr);
  static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
                "Array cache size wrong.");
  for (size_t i = 0; i < kFindArrayCacheSize; i++) {
    find_array_class_cache_[i].store(GcRoot<mirror::Class>(nullptr), std::memory_order_relaxed);
  }
}
653 
CheckSystemClass(Thread * self,Handle<mirror::Class> c1,const char * descriptor)654 void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
655   ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
656   if (c2 == nullptr) {
657     LOG(FATAL) << "Could not find class " << descriptor;
658     UNREACHABLE();
659   }
660   if (c1.Get() != c2) {
661     std::ostringstream os1, os2;
662     c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
663     c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
664     LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
665                << ". This is most likely the result of a broken build. Make sure that "
666                << "libcore and art projects match.\n\n"
667                << os1.str() << "\n\n" << os2.str();
668     UNREACHABLE();
669   }
670 }
671 
// Allocates an IfTable with room for `ifcount` interface entries; each entry
// occupies `mirror::IfTable::kMax` slots of the backing Object[] array.
// `iftable_class` must be the Object[] array class used as the iftable's class.
ObjPtr<mirror::IfTable> AllocIfTable(Thread* self,
                                     size_t ifcount,
                                     ObjPtr<mirror::Class> iftable_class)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(iftable_class->IsArrayClass());
  DCHECK(iftable_class->GetComponentType()->IsObjectClass());
  return ObjPtr<mirror::IfTable>::DownCast(ObjPtr<mirror::ObjectArray<mirror::Object>>(
      mirror::IfTable::Alloc(self, iftable_class, ifcount * mirror::IfTable::kMax)));
}
681 
// Bootstraps the class linker when no boot image is available: hand-allocates
// the core classes (Class, Object, String, Reference, primitive and array
// classes, DexCache, ClassExt, ...), registers the boot class path dex files,
// and then re-resolves each hand-made class through FindSystemClass() to
// complete initialization. Statement order is load-bearing throughout — each
// section enables facilities the next section depends on.
// Returns false (with `*error_msg` set) only for an empty or null-containing
// boot class path; all other failures are fatal.
bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
                                   std::string* error_msg) {
  VLOG(startup) << "ClassLinker::Init";

  Thread* const self = Thread::Current();
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();

  CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
  CHECK(!init_done_);

  // Use the pointer size from the runtime since we are probably creating the image.
  image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());

  // java_lang_Class comes first, it's needed for AllocClass
  // The GC can't handle an object with a null class since we can't get the size of this object.
  heap->IncrementDisableMovingGC(self);
  StackHandleScope<64> hs(self);  // 64 is picked arbitrarily.
  auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
  // Allocate the object as non-movable so that there are no cases where Object::IsClass returns
  // the incorrect result when comparing to-space vs from-space.
  Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
      heap->AllocNonMovableObject(self, nullptr, class_class_size, VoidFunctor()))));
  CHECK(java_lang_Class != nullptr);
  java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
  // Class is its own class (Class.class.getClass() == Class.class).
  java_lang_Class->SetClass(java_lang_Class.Get());
  if (kUseBakerReadBarrier) {
    java_lang_Class->AssertReadBarrierState();
  }
  java_lang_Class->SetClassSize(class_class_size);
  java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
  heap->DecrementDisableMovingGC(self);
  // AllocClass(ObjPtr<mirror::Class>) can now be used

  // Class[] is used for reflection support.
  auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
  Handle<mirror::Class> class_array_class(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
  class_array_class->SetComponentType(java_lang_Class.Get());

  // java_lang_Object comes next so that object_array_class can be created.
  Handle<mirror::Class> java_lang_Object(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
  CHECK(java_lang_Object != nullptr);
  // backfill Object as the super class of Class.
  java_lang_Class->SetSuperClass(java_lang_Object.Get());
  mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);

  java_lang_Object->SetObjectSize(sizeof(mirror::Object));
  // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
  // cleared without triggering the read barrier and unintentionally mark the sentinel alive.
  runtime->SetSentinel(heap->AllocNonMovableObject(self,
                                                   java_lang_Object.Get(),
                                                   java_lang_Object->GetObjectSize(),
                                                   VoidFunctor()));

  // Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
  if (kBitstringSubtypeCheckEnabled) {
    // It might seem the lock here is unnecessary, however all the SubtypeCheck
    // functions are annotated to require locks all the way down.
    //
    // We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
  }

  // Object[] next to hold class roots.
  Handle<mirror::Class> object_array_class(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(),
                 mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
  object_array_class->SetComponentType(java_lang_Object.Get());

  // Setup java.lang.String.
  //
  // We make this class non-movable for the unlikely case where it were to be
  // moved by a sticky-bit (minor) collection when using the Generational
  // Concurrent Copying (CC) collector, potentially creating a stale reference
  // in the `klass_` field of one of its instances allocated in the Large-Object
  // Space (LOS) -- see the comment about the dirty card scanning logic in
  // art::gc::collector::ConcurrentCopying::MarkingPhase.
  Handle<mirror::Class> java_lang_String(hs.NewHandle(
      AllocClass</* kMovable= */ false>(
          self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
  java_lang_String->SetStringClass();
  mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);

  // Setup java.lang.ref.Reference.
  Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
  java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
  mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);

  // Create storage for root classes, save away our work so far (requires descriptors).
  class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
      mirror::ObjectArray<mirror::Class>::Alloc(self,
                                                object_array_class.Get(),
                                                static_cast<int32_t>(ClassRoot::kMax)));
  CHECK(!class_roots_.IsNull());
  SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
  SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
  SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
  SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
  SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
  SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());

  // Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
  java_lang_Object->SetIfTable(AllocIfTable(self, 0, object_array_class.Get()));

  // Create array interface entries to populate once we can load system classes.
  // The two slots are filled with Cloneable and Serializable further below.
  object_array_class->SetIfTable(AllocIfTable(self, 2, object_array_class.Get()));
  DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());

  // Setup the primitive type classes.
  CreatePrimitiveClass(self, Primitive::kPrimBoolean, ClassRoot::kPrimitiveBoolean);
  CreatePrimitiveClass(self, Primitive::kPrimByte, ClassRoot::kPrimitiveByte);
  CreatePrimitiveClass(self, Primitive::kPrimChar, ClassRoot::kPrimitiveChar);
  CreatePrimitiveClass(self, Primitive::kPrimShort, ClassRoot::kPrimitiveShort);
  CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
  CreatePrimitiveClass(self, Primitive::kPrimLong, ClassRoot::kPrimitiveLong);
  CreatePrimitiveClass(self, Primitive::kPrimFloat, ClassRoot::kPrimitiveFloat);
  CreatePrimitiveClass(self, Primitive::kPrimDouble, ClassRoot::kPrimitiveDouble);
  CreatePrimitiveClass(self, Primitive::kPrimVoid, ClassRoot::kPrimitiveVoid);

  // Allocate the primitive array classes. We need only the native pointer
  // array at this point (int[] or long[], depending on architecture) but
  // we shall perform the same setup steps for all primitive array classes.
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveBoolean, ClassRoot::kBooleanArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveByte, ClassRoot::kByteArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveChar, ClassRoot::kCharArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveShort, ClassRoot::kShortArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveInt, ClassRoot::kIntArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveLong, ClassRoot::kLongArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveFloat, ClassRoot::kFloatArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveDouble, ClassRoot::kDoubleArrayClass);

  // now that these are registered, we can use AllocClass() and AllocObjectArray

  // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
  Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
  java_lang_DexCache->SetDexCacheClass();
  java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
  mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);


  // Setup dalvik.system.ClassExt
  Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
  mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);

  // Set up array classes for string, field, method
  Handle<mirror::Class> object_array_string(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(),
                 mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
  object_array_string->SetComponentType(java_lang_String.Get());
  SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());

  LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
  // Create runtime resolution and imt conflict methods.
  runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
  runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
  runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));

  // Setup boot_class_path_ and register class_path now that we can use AllocObjectArray to create
  // DexCache instances. Needs to be after String, Field, Method arrays since AllocDexCache uses
  // these roots.
  if (boot_class_path.empty()) {
    *error_msg = "Boot classpath is empty.";
    return false;
  }
  for (auto& dex_file : boot_class_path) {
    if (dex_file == nullptr) {
      *error_msg = "Null dex file.";
      return false;
    }
    AppendToBootClassPath(self, dex_file.get());
    boot_dex_files_.push_back(std::move(dex_file));
  }

  // now we can use FindSystemClass

  // Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
  // we do not need friend classes or a publicly exposed setter.
  quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
  if (!runtime->IsAotCompiler()) {
    // We need to set up the generic trampolines since we don't have an image.
    jni_dlsym_lookup_trampoline_ = GetJniDlsymLookupStub();
    jni_dlsym_lookup_critical_trampoline_ = GetJniDlsymLookupCriticalStub();
    quick_resolution_trampoline_ = GetQuickResolutionStub();
    quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
    quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
    quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
    nterp_trampoline_ = interpreter::GetNterpEntryPoint();
  }

  // Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init
  // We also need to immediately clear the finalizable flag for Object so that other classes are
  // not erroneously marked as finalizable. (Object defines an empty finalizer, so that other
  // classes can override it but it is not itself finalizable.)
  mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
  CHECK(java_lang_Object->IsFinalizable());
  java_lang_Object->ClearFinalizable();
  CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
  mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
  mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
  CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
  mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
  CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
  CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());

  // Run Class through FindSystemClass. This initializes the dex_cache_ fields and register it
  // in class_table_.
  CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");

  // Setup core array classes, i.e. Object[], String[] and Class[] and primitive
  // arrays - can't be done until Object has a vtable and component classes are loaded.
  FinishCoreArrayClassSetup(ClassRoot::kObjectArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kClassArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kJavaLangStringArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kBooleanArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kByteArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kCharArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kShortArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kIntArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kLongArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kFloatArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kDoubleArrayClass);

  // Setup the single, global copy of "iftable".
  auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
  CHECK(java_lang_Cloneable != nullptr);
  auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
  CHECK(java_io_Serializable != nullptr);
  // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
  // crawl up and explicitly list all of the supers as well.
  object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
  object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());

  // Check Class[] and Object[]'s interfaces.
  CHECK_EQ(java_lang_Cloneable.Get(), class_array_class->GetDirectInterface(0));
  CHECK_EQ(java_io_Serializable.Get(), class_array_class->GetDirectInterface(1));
  CHECK_EQ(java_lang_Cloneable.Get(), object_array_class->GetDirectInterface(0));
  CHECK_EQ(java_io_Serializable.Get(), object_array_class->GetDirectInterface(1));

  CHECK_EQ(object_array_string.Get(),
           FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));

  // The Enum class declares a "final" finalize() method to prevent subclasses from introducing
  // a finalizer but it is not itself considered finalizable. Load the Enum class now and clear
  // the finalizable flag to prevent subclasses from being marked as finalizable.
  CHECK_EQ(LookupClass(self, "Ljava/lang/Enum;", /*class_loader=*/ nullptr), nullptr);
  Handle<mirror::Class> java_lang_Enum = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Enum;"));
  CHECK(java_lang_Enum->IsFinalizable());
  java_lang_Enum->ClearFinalizable();

  // End of special init trickery, all subsequent classes may be loaded via FindSystemClass.

  // Create java.lang.reflect.Proxy root.
  SetClassRoot(ClassRoot::kJavaLangReflectProxy,
               FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));

  // Create java.lang.reflect.Field.class root.
  ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);

  // Create java.lang.reflect.Field array root.
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);

  // Create java.lang.reflect.Constructor.class root and array root.
  class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);

  // Create java.lang.reflect.Method.class root and array root.
  class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);

  // Create java.lang.invoke.CallSite.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);

  // Create java.lang.invoke.MethodType.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);

  // Create java.lang.invoke.MethodHandleImpl.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
  // MethodHandle is the superclass of MethodHandleImpl; register it from there.
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());

  // Create java.lang.invoke.MethodHandles.Lookup.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);

  // Create java.lang.invoke.VarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);

  // Create java.lang.invoke.FieldVarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);

  // Create java.lang.invoke.StaticFieldVarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/StaticFieldVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeStaticFieldVarHandle, class_root);

  // Create java.lang.invoke.ArrayElementVarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);

  // Create java.lang.invoke.ByteArrayViewVarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);

  // Create java.lang.invoke.ByteBufferViewVarHandle.class root
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);

  // Create dalvik.system.EmulatedStackFrame.class root (used by MethodHandle transforms).
  class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);

  // java.lang.ref classes need to be specially flagged, but otherwise are normal classes
  // finish initializing Reference class
  mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
  CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
  CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
           mirror::Reference::ClassSize(image_pointer_size_));
  class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);

  // Setup the ClassLoader, verifying the object_size_.
  class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
  class_root->SetClassLoaderClass();
  CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
  SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);

  // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
  // java.lang.StackTraceElement as a convenience.
  SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
  SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
               FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
  SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
               FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
  SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
               FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
  SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
               FindSystemClass(self, "[Ljava/lang/ClassLoader;"));

  // Create conflict tables that depend on the class linker.
  runtime->FixupConflictTables();

  FinishInit(self);

  VLOG(startup) << "ClassLinker::InitFromCompiler exiting";

  return true;
}
1077 
CreateStringInitBindings(Thread * self,ClassLinker * class_linker)1078 static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
1079     REQUIRES_SHARED(Locks::mutator_lock_) {
1080   // Find String.<init> -> StringFactory bindings.
1081   ObjPtr<mirror::Class> string_factory_class =
1082       class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
1083   CHECK(string_factory_class != nullptr);
1084   ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
1085   WellKnownClasses::InitStringInit(string_class, string_factory_class);
1086   // Update the primordial thread.
1087   self->InitStringEntryPoints();
1088 }
1089 
// Completes class-linker initialization after the boot class path has been
// registered: installs String.<init> bindings, validates the hard-coded
// java.lang.ref field layout (indices must match the dex field order), checks
// every class root is populated, and finally flips `init_done_`.
void ClassLinker::FinishInit(Thread* self) {
  VLOG(startup) << "ClassLinker::FinishInit entering";

  CreateStringInitBindings(self, this);

  // Let the heap know some key offsets into java.lang.ref instances
  // Note: we hard code the field indexes here rather than using FindInstanceField
  // as the types of the field can't be resolved prior to the runtime being
  // fully initialized
  StackHandleScope<3> hs(self);
  Handle<mirror::Class> java_lang_ref_Reference =
      hs.NewHandle(GetClassRoot<mirror::Reference>(this));
  Handle<mirror::Class> java_lang_ref_FinalizerReference =
      hs.NewHandle(FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;"));

  // The CHECK_STREQ pairs below pin the expected field order; a libcore change
  // that reorders these fields must fail loudly here.
  ArtField* pendingNext = java_lang_ref_Reference->GetInstanceField(0);
  CHECK_STREQ(pendingNext->GetName(), "pendingNext");
  CHECK_STREQ(pendingNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");

  ArtField* queue = java_lang_ref_Reference->GetInstanceField(1);
  CHECK_STREQ(queue->GetName(), "queue");
  CHECK_STREQ(queue->GetTypeDescriptor(), "Ljava/lang/ref/ReferenceQueue;");

  ArtField* queueNext = java_lang_ref_Reference->GetInstanceField(2);
  CHECK_STREQ(queueNext->GetName(), "queueNext");
  CHECK_STREQ(queueNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");

  ArtField* referent = java_lang_ref_Reference->GetInstanceField(3);
  CHECK_STREQ(referent->GetName(), "referent");
  CHECK_STREQ(referent->GetTypeDescriptor(), "Ljava/lang/Object;");

  ArtField* zombie = java_lang_ref_FinalizerReference->GetInstanceField(2);
  CHECK_STREQ(zombie->GetName(), "zombie");
  CHECK_STREQ(zombie->GetTypeDescriptor(), "Ljava/lang/Object;");

  // ensure all class_roots_ are initialized
  for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
    ClassRoot class_root = static_cast<ClassRoot>(i);
    ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
    CHECK(klass != nullptr);
    DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
    // note SetClassRoot does additional validation.
    // if possible add new checks there to catch errors early
  }

  CHECK(GetArrayIfTable() != nullptr);

  // disable the slow paths in FindClass and CreatePrimitiveClass now
  // that Object, Class, and Object[] are setup
  init_done_ = true;

  // Under sanitization, the small carve-out to handle stack overflow might not be enough to
  // initialize the StackOverflowError class (as it might require running the verifier). Instead,
  // ensure that the class will be initialized.
  if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
    ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
    if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
      // Strange, but don't crash.
      LOG(WARNING) << "Could not prepare StackOverflowError.";
      self->ClearException();
    }
  }

  VLOG(startup) << "ClassLinker::FinishInit exiting";
}
1155 
EnsureRootInitialized(ClassLinker * class_linker,Thread * self,ObjPtr<mirror::Class> klass)1156 static void EnsureRootInitialized(ClassLinker* class_linker,
1157                                   Thread* self,
1158                                   ObjPtr<mirror::Class> klass)
1159     REQUIRES_SHARED(Locks::mutator_lock_) {
1160   if (!klass->IsVisiblyInitialized()) {
1161     DCHECK(!klass->IsArrayClass());
1162     DCHECK(!klass->IsPrimitive());
1163     StackHandleScope<1> hs(self);
1164     Handle<mirror::Class> h_class(hs.NewHandle(klass));
1165     if (!class_linker->EnsureInitialized(
1166              self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true)) {
1167       LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
1168           << ": " << self->GetException()->Dump();
1169     }
1170   }
1171 }
1172 
RunEarlyRootClinits(Thread * self)1173 void ClassLinker::RunEarlyRootClinits(Thread* self) {
1174   StackHandleScope<1u> hs(self);
1175   Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
1176   EnsureRootInitialized(this, self, GetClassRoot<mirror::Class>(class_roots.Get()));
1177   EnsureRootInitialized(this, self, GetClassRoot<mirror::String>(class_roots.Get()));
1178   // `Field` class is needed for register_java_net_InetAddress in libcore, b/28153851.
1179   EnsureRootInitialized(this, self, GetClassRoot<mirror::Field>(class_roots.Get()));
1180 
1181   WellKnownClasses::Init(self->GetJniEnv());
1182 
1183   // `FinalizerReference` class is needed for initialization of `java.net.InetAddress`.
1184   // (Indirectly by constructing a `ObjectStreamField` which uses a `StringBuilder`
1185   // and, when resizing, initializes the `System` class for `System.arraycopy()`
1186   // and `System.<clinit> creates a finalizable object.)
1187   EnsureRootInitialized(
1188       this, self, WellKnownClasses::java_lang_ref_FinalizerReference_add->GetDeclaringClass());
1189 }
1190 
RunRootClinits(Thread * self)1191 void ClassLinker::RunRootClinits(Thread* self) {
1192   StackHandleScope<1u> hs(self);
1193   Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
1194   for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
1195     EnsureRootInitialized(this, self, GetClassRoot(ClassRoot(i), class_roots.Get()));
1196   }
1197 
1198   // Make sure certain well-known classes are initialized. Note that well-known
1199   // classes are always in the boot image, so this code is primarily intended
1200   // for running without boot image but may be needed for boot image if the
1201   // AOT-initialization fails due to introduction of new code to `<clinit>`.
1202   ArtMethod* methods_of_classes_to_initialize[] = {
1203       // Initialize primitive boxing classes (avoid check at runtime).
1204       WellKnownClasses::java_lang_Boolean_valueOf,
1205       WellKnownClasses::java_lang_Byte_valueOf,
1206       WellKnownClasses::java_lang_Character_valueOf,
1207       WellKnownClasses::java_lang_Double_valueOf,
1208       WellKnownClasses::java_lang_Float_valueOf,
1209       WellKnownClasses::java_lang_Integer_valueOf,
1210       WellKnownClasses::java_lang_Long_valueOf,
1211       WellKnownClasses::java_lang_Short_valueOf,
1212       // Initialize `StackOverflowError`.
1213       WellKnownClasses::java_lang_StackOverflowError_init,
1214       // Ensure class loader classes are initialized (avoid check at runtime).
1215       // Superclass `ClassLoader` is a class root and already initialized above.
1216       // Superclass `BaseDexClassLoader` is initialized implicitly.
1217       WellKnownClasses::dalvik_system_DelegateLastClassLoader_init,
1218       WellKnownClasses::dalvik_system_DexClassLoader_init,
1219       WellKnownClasses::dalvik_system_InMemoryDexClassLoader_init,
1220       WellKnownClasses::dalvik_system_PathClassLoader_init,
1221       WellKnownClasses::java_lang_BootClassLoader_init,
1222       // Ensure `Daemons` class is initialized (avoid check at runtime).
1223       WellKnownClasses::java_lang_Daemons_start,
1224       // Ensure `Thread` and `ThreadGroup` classes are initialized (avoid check at runtime).
1225       WellKnownClasses::java_lang_Thread_init,
1226       WellKnownClasses::java_lang_ThreadGroup_add,
1227       // Ensure reference classes are initialized (avoid check at runtime).
1228       // The `FinalizerReference` class was initialized in `RunEarlyRootClinits()`.
1229       WellKnownClasses::java_lang_ref_ReferenceQueue_add,
1230       // Ensure `InvocationTargetException` class is initialized (avoid check at runtime).
1231       WellKnownClasses::java_lang_reflect_InvocationTargetException_init,
1232       // Ensure `Parameter` class is initialized (avoid check at runtime).
1233       WellKnownClasses::java_lang_reflect_Parameter_init,
1234       // Ensure `MethodHandles` and `MethodType` classes are initialized (avoid check at runtime).
1235       WellKnownClasses::java_lang_invoke_MethodHandles_lookup,
1236       WellKnownClasses::java_lang_invoke_MethodType_makeImpl,
1237       // Ensure `DirectByteBuffer` class is initialized (avoid check at runtime).
1238       WellKnownClasses::java_nio_DirectByteBuffer_init,
1239       // Ensure `FloatingDecimal` class is initialized (avoid check at runtime).
1240       WellKnownClasses::jdk_internal_math_FloatingDecimal_getBinaryToASCIIConverter_D,
1241       // Ensure reflection annotation classes are initialized (avoid check at runtime).
1242       WellKnownClasses::libcore_reflect_AnnotationFactory_createAnnotation,
1243       WellKnownClasses::libcore_reflect_AnnotationMember_init,
1244       // We're suppressing exceptions from `DdmServer` and we do not want to repeatedly
1245       // suppress class initialization error (say, due to OOM), so initialize it early.
1246       WellKnownClasses::org_apache_harmony_dalvik_ddmc_DdmServer_dispatch,
1247   };
1248   for (ArtMethod* method : methods_of_classes_to_initialize) {
1249     EnsureRootInitialized(this, self, method->GetDeclaringClass());
1250   }
1251   ArtField* fields_of_classes_to_initialize[] = {
1252       // Ensure classes used by class loaders are initialized (avoid check at runtime).
1253       WellKnownClasses::dalvik_system_DexFile_cookie,
1254       WellKnownClasses::dalvik_system_DexPathList_dexElements,
1255       WellKnownClasses::dalvik_system_DexPathList__Element_dexFile,
1256       // Ensure `VMRuntime` is initialized (avoid check at runtime).
1257       WellKnownClasses::dalvik_system_VMRuntime_nonSdkApiUsageConsumer,
1258       // Initialize empty arrays needed by `StackOverflowError`.
1259       WellKnownClasses::java_util_Collections_EMPTY_LIST,
1260       WellKnownClasses::libcore_util_EmptyArray_STACK_TRACE_ELEMENT,
1261       // Initialize boxing caches needed by the compiler.
1262       WellKnownClasses::java_lang_Byte_ByteCache_cache,
1263       WellKnownClasses::java_lang_Character_CharacterCache_cache,
1264       WellKnownClasses::java_lang_Integer_IntegerCache_cache,
1265       WellKnownClasses::java_lang_Long_LongCache_cache,
1266       WellKnownClasses::java_lang_Short_ShortCache_cache,
1267   };
1268   for (ArtField* field : fields_of_classes_to_initialize) {
1269     EnsureRootInitialized(this, self, field->GetDeclaringClass());
1270   }
1271 }
1272 
1273 ALWAYS_INLINE
ComputeMethodHash(ArtMethod * method)1274 static uint32_t ComputeMethodHash(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
1275   DCHECK(!method->IsRuntimeMethod());
1276   DCHECK(!method->IsProxyMethod());
1277   DCHECK(!method->IsObsolete());
1278   // Do not use `ArtMethod::GetNameView()` to avoid unnecessary runtime/proxy/obsolete method
1279   // checks. It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1280   const DexFile& dex_file = method->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1281   const dex::MethodId& method_id = dex_file.GetMethodId(method->GetDexMethodIndex());
1282   std::string_view name = dex_file.GetMethodNameView(method_id);
1283   return ComputeModifiedUtf8Hash(name);
1284 }
1285 
1286 ALWAYS_INLINE
MethodSignatureEquals(ArtMethod * lhs,ArtMethod * rhs)1287 static bool MethodSignatureEquals(ArtMethod* lhs, ArtMethod* rhs)
1288     REQUIRES_SHARED(Locks::mutator_lock_) {
1289   DCHECK(!lhs->IsRuntimeMethod());
1290   DCHECK(!lhs->IsProxyMethod());
1291   DCHECK(!lhs->IsObsolete());
1292   DCHECK(!rhs->IsRuntimeMethod());
1293   DCHECK(!rhs->IsProxyMethod());
1294   DCHECK(!rhs->IsObsolete());
1295   // Do not use `ArtMethod::GetDexFile()` to avoid unnecessary obsolete method checks.
1296   // It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1297   const DexFile& lhs_dex_file = lhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1298   const DexFile& rhs_dex_file = rhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1299   const dex::MethodId& lhs_mid = lhs_dex_file.GetMethodId(lhs->GetDexMethodIndex());
1300   const dex::MethodId& rhs_mid = rhs_dex_file.GetMethodId(rhs->GetDexMethodIndex());
1301   if (&lhs_dex_file == &rhs_dex_file) {
1302     return lhs_mid.name_idx_ == rhs_mid.name_idx_ &&
1303            lhs_mid.proto_idx_ == rhs_mid.proto_idx_;
1304   } else {
1305     return
1306         lhs_dex_file.GetMethodNameView(lhs_mid) == rhs_dex_file.GetMethodNameView(rhs_mid) &&
1307         lhs_dex_file.GetMethodSignature(lhs_mid) == rhs_dex_file.GetMethodSignature(rhs_mid);
1308   }
1309 }
1310 
InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,PointerSize pointer_size,ArrayRef<uint32_t> virtual_method_hashes)1311 static void InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,
1312                                                 PointerSize pointer_size,
1313                                                 /*out*/ ArrayRef<uint32_t> virtual_method_hashes)
1314     REQUIRES_SHARED(Locks::mutator_lock_) {
1315   ArraySlice<ArtMethod> virtual_methods = java_lang_Object->GetVirtualMethods(pointer_size);
1316   DCHECK_EQ(virtual_method_hashes.size(), virtual_methods.size());
1317   for (size_t i = 0; i != virtual_method_hashes.size(); ++i) {
1318     virtual_method_hashes[i] = ComputeMethodHash(&virtual_methods[i]);
1319   }
1320 }
1321 
// Plain data carrier used by `InitFromBootImage()` inside a heap-walk visitor:
// holds the trampoline entrypoints to check against, and reports (via `m` and
// `error`) any method found to use one of those trampolines as its entrypoint.
struct TrampolineCheckData {
  const void* quick_resolution_trampoline;
  const void* quick_imt_conflict_trampoline;
  const void* quick_generic_jni_trampoline;
  const void* quick_to_interpreter_bridge_trampoline;
  const void* nterp_trampoline;
  PointerSize pointer_size;  // Image pointer size used to read method entrypoints.
  ArtMethod* m;              // The offending method, valid only when `error` is true.
  bool error;                // Set to true if a matching entrypoint was found.
};
1332 
// Initializes class linker state from the boot image spaces: validates the
// image pointer size, publishes the image methods (resolution, IMT conflict,
// callee-save) to the runtime, caches the trampoline entrypoints from the
// default oat header, installs the class roots and the cleared-JNI-weak
// sentinel, registers the boot class path dex files, and records the boot
// image JNI stubs. Returns false and sets `*error_msg` on failure.
bool ClassLinker::InitFromBootImage(std::string* error_msg) {
  VLOG(startup) << __FUNCTION__ << " entering";
  CHECK(!init_done_);

  Runtime* const runtime = Runtime::Current();
  Thread* const self = Thread::Current();
  gc::Heap* const heap = runtime->GetHeap();
  std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
  CHECK(!spaces.empty());
  // The primary (first) boot image header determines the pointer size.
  const ImageHeader& image_header = spaces[0]->GetImageHeader();
  image_pointer_size_ = image_header.GetPointerSize();
  if (UNLIKELY(image_pointer_size_ != PointerSize::k32 &&
               image_pointer_size_ != PointerSize::k64)) {
    *error_msg =
        StringPrintf("Invalid image pointer size: %u", static_cast<uint32_t>(image_pointer_size_));
    return false;
  }
  if (!runtime->IsAotCompiler()) {
    // Only the Aot compiler supports having an image with a different pointer size than the
    // runtime. This happens on the host for compiling 32 bit tests since we use a 64 bit libart
    // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps.
    if (image_pointer_size_ != kRuntimePointerSize) {
      *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
                                static_cast<size_t>(image_pointer_size_),
                                sizeof(void*));
      return false;
    }
  }
  // Publish the special runtime methods stored in the primary image header.
  DCHECK(!runtime->HasResolutionMethod());
  runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
  runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
  runtime->SetImtUnimplementedMethod(
      image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
      CalleeSaveType::kSaveAllCalleeSaves);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
      CalleeSaveType::kSaveRefsOnly);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
      CalleeSaveType::kSaveRefsAndArgs);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
      CalleeSaveType::kSaveEverything);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
      CalleeSaveType::kSaveEverythingForClinit);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
      CalleeSaveType::kSaveEverythingForSuspendCheck);

  // Cache the trampoline entrypoints from the default (first) oat file's header.
  std::vector<const OatFile*> oat_files =
      runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
  DCHECK(!oat_files.empty());
  const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
  jni_dlsym_lookup_trampoline_ = default_oat_header.GetJniDlsymLookupTrampoline();
  jni_dlsym_lookup_critical_trampoline_ = default_oat_header.GetJniDlsymLookupCriticalTrampoline();
  quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
  quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
  quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
  quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
  nterp_trampoline_ = default_oat_header.GetNterpTrampoline();
  if (kIsDebugBuild) {
    // Check that the other images use the same trampoline.
    for (size_t i = 1; i < oat_files.size(); ++i) {
      const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
      const void* ith_jni_dlsym_lookup_trampoline_ =
          ith_oat_header.GetJniDlsymLookupTrampoline();
      const void* ith_jni_dlsym_lookup_critical_trampoline_ =
          ith_oat_header.GetJniDlsymLookupCriticalTrampoline();
      const void* ith_quick_resolution_trampoline =
          ith_oat_header.GetQuickResolutionTrampoline();
      const void* ith_quick_imt_conflict_trampoline =
          ith_oat_header.GetQuickImtConflictTrampoline();
      const void* ith_quick_generic_jni_trampoline =
          ith_oat_header.GetQuickGenericJniTrampoline();
      const void* ith_quick_to_interpreter_bridge_trampoline =
          ith_oat_header.GetQuickToInterpreterBridge();
      const void* ith_nterp_trampoline =
          ith_oat_header.GetNterpTrampoline();
      if (ith_jni_dlsym_lookup_trampoline_ != jni_dlsym_lookup_trampoline_ ||
          ith_jni_dlsym_lookup_critical_trampoline_ != jni_dlsym_lookup_critical_trampoline_ ||
          ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
          ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
          ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
          ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_ ||
          ith_nterp_trampoline != nterp_trampoline_) {
        // Make sure that all methods in this image do not contain those trampolines as
        // entrypoints. Otherwise the class-linker won't be able to work with a single set.
        TrampolineCheckData data;
        data.error = false;
        data.pointer_size = GetImagePointerSize();
        data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
        data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
        data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
        data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
        data.nterp_trampoline = ith_nterp_trampoline;
        ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
        // Walk all classes in the i-th image and flag any method whose quick
        // entrypoint matches one of the i-th image's own trampolines.
        auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
          if (obj->IsClass()) {
            ObjPtr<mirror::Class> klass = obj->AsClass();
            for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
              const void* entrypoint =
                  m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
              if (entrypoint == data.quick_resolution_trampoline ||
                  entrypoint == data.quick_imt_conflict_trampoline ||
                  entrypoint == data.quick_generic_jni_trampoline ||
                  entrypoint == data.quick_to_interpreter_bridge_trampoline) {
                data.m = &m;
                data.error = true;
                return;
              }
            }
          }
        };
        spaces[i]->GetLiveBitmap()->Walk(visitor);
        if (data.error) {
          ArtMethod* m = data.m;
          LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
          *error_msg = "Found an ArtMethod with a bad entrypoint";
          return false;
        }
      }
    }
  }

  // Install the class roots array from the primary image.
  class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
      ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
          image_header.GetImageRoot(ImageHeader::kClassRoots)));
  DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);

  DCHECK_EQ(GetClassRoot<mirror::Object>(this)->GetObjectSize(), sizeof(mirror::Object));
  // Install the sentinel object used for cleared JNI weak globals.
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
          image_header.GetImageRoot(ImageHeader::kBootImageLiveObjects));
  runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
  DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));

  // Boot class loader, use a null handle.
  if (!AddImageSpaces(ArrayRef<gc::space::ImageSpace*>(spaces),
                      ScopedNullHandle<mirror::ClassLoader>(),
                      /*context=*/nullptr,
                      &boot_dex_files_,
                      error_msg)) {
    return false;
  }
  // We never use AOT code for debuggable.
  if (!runtime->IsJavaDebuggable()) {
    // Cache each boot image JNI stub's code pointer, keyed by `JniStubKey`.
    for (gc::space::ImageSpace* space : spaces) {
      const ImageHeader& header = space->GetImageHeader();
      header.VisitJniStubMethods([&](ArtMethod* method)
          REQUIRES_SHARED(Locks::mutator_lock_) {
        const void* stub = method->GetOatMethodQuickCode(image_pointer_size_);
        boot_image_jni_stubs_.Put(std::make_pair(JniStubKey(method), stub));
        return method;
      }, space->Begin(), image_pointer_size_);
    }
  }

  InitializeObjectVirtualMethodHashes(GetClassRoot<mirror::Object>(this),
                                      image_pointer_size_,
                                      ArrayRef<uint32_t>(object_virtual_method_hashes_));
  FinishInit(self);

  VLOG(startup) << __FUNCTION__ << " exiting";
  return true;
}
1501 
AddExtraBootDexFiles(Thread * self,std::vector<std::unique_ptr<const DexFile>> && additional_dex_files)1502 void ClassLinker::AddExtraBootDexFiles(
1503     Thread* self,
1504     std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
1505   for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
1506     AppendToBootClassPath(self, dex_file.get());
1507     if (kIsDebugBuild) {
1508       for (const auto& boot_dex_file : boot_dex_files_) {
1509         DCHECK_NE(boot_dex_file->GetLocation(), dex_file->GetLocation());
1510       }
1511     }
1512     boot_dex_files_.push_back(std::move(dex_file));
1513   }
1514 }
1515 
IsBootClassLoader(ObjPtr<mirror::Object> class_loader)1516 bool ClassLinker::IsBootClassLoader(ObjPtr<mirror::Object> class_loader) {
1517   return class_loader == nullptr ||
1518          WellKnownClasses::java_lang_BootClassLoader == class_loader->GetClass();
1519 }
1520 
// Visitor run over classes in a `LinearAlloc` that is about to be deleted.
// For each class it tells Class Hierarchy Analysis (CHA) to reset
// single-implementation information rooted in that class's hierarchy.
class CHAOnDeleteUpdateClassVisitor {
 public:
  explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
      : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
        pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
        self_(Thread::Current()) {}

  // Returns true to continue visiting subsequent classes.
  bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    // This class is going to be unloaded. Tell CHA about it.
    cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
    return true;
  }
 private:
  const LinearAlloc* allocator_;          // The allocation region being deleted.
  const ClassHierarchyAnalysis* cha_;
  const PointerSize pointer_size_;
  const Thread* self_;  // NOTE(review): captured but never read by this visitor.
};
1539 
/*
 * A class used to ensure that all references to strings interned in an AppImage have been
 * properly recorded in the interned references list, and is only ever run in debug mode.
 */
class CountInternedStringReferencesVisitor {
 public:
  CountInternedStringReferencesVisitor(const gc::space::ImageSpace& space,
                                       const InternTable::UnorderedSet& image_interns)
      : space_(space),
        image_interns_(image_interns),
        count_(0u) {}

  // Increments `count_` if `referred_obj` is a string located inside `space_`
  // that is present (by pointer identity) in `image_interns_`.
  void TestObject(ObjPtr<mirror::Object> referred_obj) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (referred_obj != nullptr &&
        space_.HasAddress(referred_obj.Ptr()) &&
        referred_obj->IsString()) {
      ObjPtr<mirror::String> referred_str = referred_obj->AsString();
      uint32_t hash = static_cast<uint32_t>(referred_str->GetStoredHashCode());
      // All image strings have the hash code calculated, even if they are not interned.
      DCHECK_EQ(hash, static_cast<uint32_t>(referred_str->ComputeHashCode()));
      auto it = image_interns_.FindWithHash(GcRoot<mirror::String>(referred_str), hash);
      if (it != image_interns_.end() && it->Read() == referred_str) {
        ++count_;
      }
    }
  }

  // Root visiting interface (used for native GC roots, e.g. in dex caches).
  void VisitRootIfNonNull(
      mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    TestObject(root->AsMirrorPtr());
  }

  // Visit Class Fields
  void operator()(ObjPtr<mirror::Object> obj,
                  MemberOffset offset,
                  [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) {
    // References within image or across images don't need a read barrier.
    ObjPtr<mirror::Object> referred_obj =
        obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
    TestObject(referred_obj);
  }

  // Visit the referent field of a `java.lang.ref.Reference` object.
  void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
    operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
  }

  // Number of matching references seen so far.
  size_t GetCount() const {
    return count_;
  }

 private:
  const gc::space::ImageSpace& space_;
  const InternTable::UnorderedSet& image_interns_;
  mutable size_t count_;  // Modified from the `const` callbacks.
};
1605 
/*
 * This function counts references to strings interned in the AppImage.
 * This is used in debug build to check against the number of the recorded references.
 */
size_t CountInternedStringReferences(gc::space::ImageSpace& space,
                                     const InternTable::UnorderedSet& image_interns)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
  const ImageHeader& image_header = space.GetImageHeader();
  const uint8_t* target_base = space.GetMemMap()->Begin();
  const ImageSection& objects_section = image_header.GetObjectsSection();

  // Compute the mapped address range of the image's object section.
  auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
  auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());

  // Visit every marked object in the section and count references to strings
  // that are in `image_interns`.
  CountInternedStringReferencesVisitor visitor(space, image_interns);
  bitmap->VisitMarkedRange(objects_begin,
                           objects_end,
                           [&space, &visitor](mirror::Object* obj)
    REQUIRES_SHARED(Locks::mutator_lock_) {
    if (space.HasAddress(obj)) {
      if (obj->IsDexCache()) {
        obj->VisitReferences</* kVisitNativeRoots= */ true,
                             kVerifyNone,
                             kWithoutReadBarrier>(visitor, visitor);
      } else {
        // Don't visit native roots for non-dex-cache as they can't contain
        // native references to strings.  This is verified during compilation
        // by ImageWriter::VerifyNativeGCRootInvariants.
        obj->VisitReferences</* kVisitNativeRoots= */ false,
                             kVerifyNone,
                             kWithoutReadBarrier>(visitor, visitor);
      }
    }
  });
  return visitor.GetCount();
}
1643 
// Visits every string reference recorded in the app image's
// string-reference-offsets section, passing the referred string to `visitor`.
// If the visitor returns a different string, the reference is updated in place
// to point to the returned string.
template <typename Visitor>
static void VisitInternedStringReferences(
    gc::space::ImageSpace* space,
    const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
  const uint8_t* target_base = space->Begin();
  const ImageSection& sro_section =
      space->GetImageHeader().GetImageStringReferenceOffsetsSection();
  const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);

  VLOG(image)
      << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
      << num_string_offsets;

  const auto* sro_base =
      reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());

  for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
    // Each entry is a pair: offset of the holder object within the image, and
    // either a field offset or an encoded dex cache string array index.
    uint32_t base_offset = sro_base[offset_index].first;

    uint32_t raw_member_offset = sro_base[offset_index].second;
    DCHECK_ALIGNED(base_offset, 2);

    ObjPtr<mirror::Object> obj_ptr =
        reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
    if (obj_ptr->IsDexCache() && raw_member_offset >= sizeof(mirror::DexCache)) {
      // Special case for strings referenced from dex cache array: the offset is
      // actually decoded as an index into the dex cache string array.
      uint32_t index = raw_member_offset - sizeof(mirror::DexCache);
      mirror::GcRootArray<mirror::String>* array = obj_ptr->AsDexCache()->GetStringsArray();
      // The array could be concurrently set to null. See `StartupCompletedTask`.
      if (array != nullptr) {
        ObjPtr<mirror::String> referred_string = array->Get(index);
        DCHECK(referred_string != nullptr);
        ObjPtr<mirror::String> visited = visitor(referred_string);
        if (visited != referred_string) {
          array->Set(index, visited.Ptr());
        }
      }
    } else {
      // Common case: the second value is a plain field offset into the object.
      DCHECK_ALIGNED(raw_member_offset, 2);
      MemberOffset member_offset(raw_member_offset);
      ObjPtr<mirror::String> referred_string =
          obj_ptr->GetFieldObject<mirror::String,
                                  kVerifyNone,
                                  kWithoutReadBarrier,
                                  /* kIsVolatile= */ false>(member_offset);
      DCHECK(referred_string != nullptr);

      ObjPtr<mirror::String> visited = visitor(referred_string);
      if (visited != referred_string) {
        // Store the replacement string back into the holder's field.
        obj_ptr->SetFieldObject</* kTransactionActive= */ false,
                                /* kCheckTransaction= */ false,
                                kVerifyNone,
                                /* kIsVolatile= */ false>(member_offset, visited);
      }
    }
  }
}
1702 
// Debug-build verification: every string reference recorded in the app image's
// string-reference-offsets section must resolve to a string in the image's
// interned-strings section, and the number of recorded references must equal
// the number found by scanning the image objects.
static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  InternTable::UnorderedSet image_interns;
  const ImageSection& section = space->GetImageHeader().GetInternedStringsSection();
  if (section.Size() > 0) {
    size_t read_count;
    const uint8_t* data = space->Begin() + section.Offset();
    // Deserialize a view over the image's intern table without copying the data.
    InternTable::UnorderedSet image_set(data, /*make_copy_of_data=*/ false, &read_count);
    image_set.swap(image_interns);
  }
  size_t num_recorded_refs = 0u;
  VisitInternedStringReferences(
      space,
      [&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
          REQUIRES_SHARED(Locks::mutator_lock_) {
        // Every recorded reference must point at the interned string itself.
        auto it = image_interns.find(GcRoot<mirror::String>(str));
        CHECK(it != image_interns.end());
        CHECK(it->Read() == str);
        ++num_recorded_refs;
        return str;
      });
  size_t num_found_refs = CountInternedStringReferences(*space, image_interns);
  CHECK_EQ(num_recorded_refs, num_found_refs);
}
1727 
// new_class_set is the set of classes that were read from the class table section in the image.
// If there was no class table section, it is null.
// Note: using a class here to avoid having to make ClassLinker internals public.
class AppImageLoadingHelper {
 public:
  // Registers the app image's dex caches with `class_loader`, handles the
  // image's interned strings, and (when enabled) verifies that packed
  // `ArtMethod` declaring classes are live.
  static void Update(
      ClassLinker* class_linker,
      gc::space::ImageSpace* space,
      Handle<mirror::ClassLoader> class_loader,
      Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
      REQUIRES(!Locks::dex_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Interns the strings referenced from the app image, remapping references
  // that conflict with strings already present in the runtime intern table.
  static void HandleAppImageStrings(gc::space::ImageSpace* space)
      REQUIRES_SHARED(Locks::mutator_lock_);
};
1744 
// Integrates a freshly loaded app image into the runtime: registers its dex
// caches with `class_loader`, optionally releases startup-only dex cache
// arrays/metadata, interns the image's strings, and (when
// `kVerifyArtMethodDeclaringClasses` is set) checks that every packed method's
// declaring class is marked live.
void AppImageLoadingHelper::Update(
    ClassLinker* class_linker,
    gc::space::ImageSpace* space,
    Handle<mirror::ClassLoader> class_loader,
    Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
    REQUIRES(!Locks::dex_lock_)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedTrace app_image_timing("AppImage:Updating");

  if (kIsDebugBuild && ClassLinker::kAppImageMayContainStrings) {
    // In debug build, verify the string references before applying
    // the Runtime::LoadAppImageStartupCache() option.
    VerifyInternedStringReferences(space);
  }
  // App images are always loaded with a non-null (non-boot) class loader.
  DCHECK(class_loader.Get() != nullptr);
  Thread* const self = Thread::Current();
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();
  const ImageHeader& header = space->GetImageHeader();
  int32_t number_of_dex_cache_arrays_cleared = 0;
  {
    // Register dex caches with the class loader.
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
      const DexFile* const dex_file = dex_cache->GetDexFile();
      {
        WriterMutexLock mu2(self, *Locks::dex_lock_);
        // The dex file must not have been registered already.
        CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
        if (runtime->GetStartupCompleted()) {
          number_of_dex_cache_arrays_cleared++;
          // Free up dex cache arrays that we would only allocate at startup.
          // We do this here before registering and within the lock to be
          // consistent with `StartupCompletedTask`.
          dex_cache->UnlinkStartupCaches();
        }
        VLOG(image) << "App image registers dex file " << dex_file->GetLocation();
        class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
      }
    }
  }
  if (number_of_dex_cache_arrays_cleared == dex_caches->GetLength()) {
    // Free up dex cache arrays that we would only allocate at startup.
    // If `number_of_dex_cache_arrays_cleared` isn't the number of dex caches in
    // the image, then there is a race with the `StartupCompletedTask`, which
    // will release the space instead.
    space->ReleaseMetadata();
  }

  if (ClassLinker::kAppImageMayContainStrings) {
    HandleAppImageStrings(space);
  }

  if (kVerifyArtMethodDeclaringClasses) {
    // Verify that each packed method's declaring class is marked in the live bitmap.
    ScopedTrace timing("AppImage:VerifyDeclaringClasses");
    ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
    gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
    header.VisitPackedArtMethods([&](ArtMethod& method)
        REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
      ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
      if (klass != nullptr) {
        CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
      }
    }, space->Begin(), kRuntimePointerSize);
  }
}
1810 
// Interns all strings referenced from the app image `space`.
// Strings that already have an equivalent entry in the runtime intern table
// ("conflicts") cannot simply be added twice; instead every image-recorded
// reference to such a string is rewritten to point at the pre-existing
// interned instance.
void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
  // Iterate over the string reference offsets stored in the image and intern
  // the strings they point to.
  ScopedTrace timing("AppImage:InternString");

  Runtime* const runtime = Runtime::Current();
  InternTable* const intern_table = runtime->GetInternTable();

  // Add the intern table, removing any conflicts. For conflicts, store the new address in a map
  // for faster lookup.
  // TODO: Optimize with a bitmap or bloom filter
  SafeMap<mirror::String*, mirror::String*> intern_remap;
  // Invoked by AddImageStringsToTable with the image's own intern set while
  // intern_table_lock_ is held; removes conflicting entries from `interns`
  // and records old->canonical pointers in `intern_remap`.
  auto func = [&](InternTable::UnorderedSet& interns)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::intern_table_lock_) {
    const size_t non_boot_image_strings = intern_table->CountInterns(
        /*visit_boot_images=*/false,
        /*visit_non_boot_images=*/true);
    VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
    VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
    // Visit the smaller of the two sets to compute the intersection.
    if (interns.size() < non_boot_image_strings) {
      // Image set is smaller: probe the runtime table for each image string.
      for (auto it = interns.begin(); it != interns.end(); ) {
        ObjPtr<mirror::String> string = it->Read();
        ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
        if (existing == nullptr) {
          existing = intern_table->LookupStrongLocked(string);
        }
        if (existing != nullptr) {
          // Conflict: remember the canonical string and drop the image entry.
          intern_remap.Put(string.Ptr(), existing.Ptr());
          it = interns.erase(it);
        } else {
          ++it;
        }
      }
    } else {
      // Runtime table is smaller: walk its non-boot-image entries and look
      // each one up in the image set instead.
      intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
          REQUIRES_SHARED(Locks::mutator_lock_)
          REQUIRES(Locks::intern_table_lock_) {
        auto it = interns.find(root);
        if (it != interns.end()) {
          ObjPtr<mirror::String> existing = root.Read();
          intern_remap.Put(it->Read(), existing.Ptr());
          it = interns.erase(it);
        }
      }, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
    }
    // Consistency check to ensure correctness.
    if (kIsDebugBuild) {
      // After conflict removal, no string remaining in `interns` may still be
      // present (weakly or strongly) in the runtime intern table.
      for (GcRoot<mirror::String>& root : interns) {
        ObjPtr<mirror::String> string = root.Read();
        CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
        CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
      }
    }
  };
  intern_table->AddImageStringsToTable(space, func);
  if (!intern_remap.empty()) {
    VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
    // Redirect every recorded string reference in the image that points at a
    // conflicting string to the canonical interned instance.
    VisitInternedStringReferences(
        space,
        [&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
          auto it = intern_remap.find(str.Ptr());
          if (it != intern_remap.end()) {
            return ObjPtr<mirror::String>(it->second);
          }
          return str;
        });
  }
}
1881 
OpenOatDexFile(const OatFile * oat_file,const char * location,std::string * error_msg)1882 static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
1883                                                      const char* location,
1884                                                      std::string* error_msg)
1885     REQUIRES_SHARED(Locks::mutator_lock_) {
1886   DCHECK(error_msg != nullptr);
1887   std::unique_ptr<const DexFile> dex_file;
1888   const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, error_msg);
1889   if (oat_dex_file == nullptr) {
1890     return std::unique_ptr<const DexFile>();
1891   }
1892   std::string inner_error_msg;
1893   dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
1894   if (dex_file == nullptr) {
1895     *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
1896                               location,
1897                               oat_file->GetLocation().c_str(),
1898                               inner_error_msg.c_str());
1899     return std::unique_ptr<const DexFile>();
1900   }
1901 
1902   if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
1903     CHECK(dex_file->GetSha1() != oat_dex_file->GetSha1());
1904     *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
1905                               location,
1906                               dex_file->GetLocationChecksum(),
1907                               oat_dex_file->GetDexFileLocationChecksum());
1908     return std::unique_ptr<const DexFile>();
1909   }
1910   CHECK(dex_file->GetSha1() == oat_dex_file->GetSha1());
1911   return dex_file;
1912 }
1913 
OpenImageDexFiles(gc::space::ImageSpace * space,std::vector<std::unique_ptr<const DexFile>> * out_dex_files,std::string * error_msg)1914 bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
1915                                     std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1916                                     std::string* error_msg) {
1917   ScopedAssertNoThreadSuspension nts(__FUNCTION__);
1918   const ImageHeader& header = space->GetImageHeader();
1919   ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
1920   DCHECK(dex_caches_object != nullptr);
1921   ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
1922       dex_caches_object->AsObjectArray<mirror::DexCache>();
1923   const OatFile* oat_file = space->GetOatFile();
1924   for (auto dex_cache : dex_caches->Iterate()) {
1925     std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
1926     std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1927                                                              dex_file_location.c_str(),
1928                                                              error_msg);
1929     if (dex_file == nullptr) {
1930       return false;
1931     }
1932     dex_cache->SetDexFile(dex_file.get());
1933     out_dex_files->push_back(std::move(dex_file));
1934   }
1935   return true;
1936 }
1937 
// Opens the dex files of `space` from its oat file and initializes the native
// fields of the corresponding dex caches. For the boot image (null
// `class_loader`) each dex file is additionally appended to the boot class
// path. Returns false with `*error_msg` set on count mismatch or open failure.
bool ClassLinker::OpenAndInitImageDexFiles(
    const gc::space::ImageSpace* space,
    Handle<mirror::ClassLoader> class_loader,
    std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
    std::string* error_msg) {
  DCHECK(out_dex_files != nullptr);
  // A null class loader means this is the boot image.
  const bool app_image = class_loader != nullptr;
  const ImageHeader& header = space->GetImageHeader();
  ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
  DCHECK(dex_caches_object != nullptr);
  Thread* const self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
      hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
  const OatFile* oat_file = space->GetOatFile();
  // The image must describe exactly one dex cache per dex file recorded in
  // the oat header.
  if (oat_file->GetOatHeader().GetDexFileCount() !=
      static_cast<uint32_t>(dex_caches->GetLength())) {
    *error_msg =
        "Dex cache count and dex file count mismatch while trying to initialize from image";
    return false;
  }

  for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
    std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
    std::unique_ptr<const DexFile> dex_file =
        OpenOatDexFile(oat_file, dex_file_location.c_str(), error_msg);
    if (dex_file == nullptr) {
      return false;
    }

    {
      // Native fields are all null.  Initialize them.
      WriterMutexLock mu(self, *Locks::dex_lock_);
      dex_cache->Initialize(dex_file.get(), class_loader.Get());
    }
    if (!app_image) {
      // Register dex files, keep track of existing ones that are conflicts.
      AppendToBootClassPath(dex_file.get(), dex_cache);
    }
    out_dex_files->push_back(std::move(dex_file));
  }
  return true;
}
1981 
// Helper class for ArtMethod checks when adding an image. Keeps all required functionality
// together and caches some intermediate results.
template <PointerSize kPointerSize>
class ImageChecker final {
 public:
  // Walks every object in the image space's objects section and verifies
  // class metadata consistency: fields and declared methods must point back
  // to their declaring class, and every method reachable through vtables,
  // IMTs, embedded vtables, and interface method arrays must lie inside a
  // boot image (runtime-)methods section.
  static void CheckObjects(gc::Heap* heap, gc::space::ImageSpace* space)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // There can be no GC during boot image initialization, so we do not need read barriers.
    ScopedDebugDisallowReadBarriers sddrb(Thread::Current());

    CHECK_EQ(kPointerSize, space->GetImageHeader().GetPointerSize());
    const ImageSection& objects_section = space->GetImageHeader().GetObjectsSection();
    uintptr_t space_begin = reinterpret_cast<uintptr_t>(space->Begin());
    uintptr_t objects_begin = space_begin + objects_section.Offset();
    uintptr_t objects_end = objects_begin + objects_section.Size();
    ImageChecker ic(heap);
    auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK(obj != nullptr);
      mirror::Class* obj_klass = obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
      CHECK(obj_klass != nullptr) << "Null class in object " << obj;
      mirror::Class* class_class = obj_klass->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
      CHECK(class_class != nullptr) << "Null class class " << obj;
      if (obj_klass == class_class) {
        // `obj` is itself a class object; verify all of its members.
        auto klass = obj->AsClass();
        for (ArtField& field : klass->GetIFields()) {
          CHECK_EQ(field.GetDeclaringClass<kWithoutReadBarrier>(), klass);
        }
        for (ArtField& field : klass->GetSFields()) {
          CHECK_EQ(field.GetDeclaringClass<kWithoutReadBarrier>(), klass);
        }
        for (ArtMethod& m : klass->GetMethods(kPointerSize)) {
          ic.CheckArtMethod(&m, klass);
        }
        ObjPtr<mirror::PointerArray> vtable =
            klass->GetVTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
        if (vtable != nullptr) {
          ic.CheckArtMethodPointerArray(vtable);
        }
        if (klass->ShouldHaveImt()) {
          ImTable* imt = klass->GetImt(kPointerSize);
          for (size_t i = 0; i < ImTable::kSize; ++i) {
            // IMT slots may hold methods of other classes (e.g. conflict
            // methods), so no declaring class is expected.
            ic.CheckArtMethod(imt->Get(i, kPointerSize), /*expected_class=*/ nullptr);
          }
        }
        if (klass->ShouldHaveEmbeddedVTable()) {
          for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
            ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, kPointerSize),
                              /*expected_class=*/ nullptr);
          }
        }
        ObjPtr<mirror::IfTable> iftable =
            klass->GetIfTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
        int32_t iftable_count = (iftable != nullptr) ? iftable->Count() : 0;
        for (int32_t i = 0; i < iftable_count; ++i) {
          ObjPtr<mirror::PointerArray> method_array =
              iftable->GetMethodArrayOrNull<kDefaultVerifyFlags, kWithoutReadBarrier>(i);
          if (method_array != nullptr) {
            ic.CheckArtMethodPointerArray(method_array);
          }
        }
      }
    };
    space->GetLiveBitmap()->VisitMarkedRange(objects_begin, objects_end, visitor);
  }

 private:
  // Caches the begin address of every boot image space so CheckArtMethod can
  // test section membership without re-walking the heap.
  explicit ImageChecker(gc::Heap* heap) {
    ArrayRef<gc::space::ImageSpace* const> spaces(heap->GetBootImageSpaces());
    space_begin_.reserve(spaces.size());
    for (gc::space::ImageSpace* space : spaces) {
      // The image header is the first thing in each space.
      CHECK_EQ(static_cast<const void*>(space->Begin()), &space->GetImageHeader());
      space_begin_.push_back(space->Begin());
    }
  }

  // Checks the declaring-class expectations for `m` (null for runtime
  // methods, non-null for copied methods, equal to `expected_class` when one
  // is given) and that `m` lives in some boot image's methods or runtime
  // methods section.
  void CheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked<kWithoutReadBarrier>();
    if (m->IsRuntimeMethod()) {
      CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
    } else if (m->IsCopied()) {
      CHECK(declaring_class != nullptr) << m->PrettyMethod();
    } else if (expected_class != nullptr) {
      CHECK_EQ(declaring_class, expected_class) << m->PrettyMethod();
    }
    bool contains = false;
    for (const uint8_t* begin : space_begin_) {
      // Offset of `m` relative to this space; Contains rejects out-of-range
      // values, so an unrelated space simply fails the test.
      const size_t offset = reinterpret_cast<uint8_t*>(m) - begin;
      const ImageHeader* header = reinterpret_cast<const ImageHeader*>(begin);
      if (header->GetMethodsSection().Contains(offset) ||
          header->GetRuntimeMethodsSection().Contains(offset)) {
        contains = true;
        break;
      }
    }
    CHECK(contains) << m << " not found";
  }

  // Applies CheckArtMethod to every (non-null) entry of a method pointer
  // array (vtable or interface method array).
  void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK(arr != nullptr);
    for (int32_t j = 0; j < arr->GetLength(); ++j) {
      auto* method = arr->GetElementPtrSize<ArtMethod*>(j, kPointerSize);
      CHECK(method != nullptr);
      CheckArtMethod(method, /*expected_class=*/ nullptr);
    }
  }

  // Begin addresses of all boot image spaces; see constructor.
  std::vector<const uint8_t*> space_begin_;
};
2092 
VerifyAppImage(const ImageHeader & header,const Handle<mirror::ClassLoader> & class_loader,ClassTable * class_table,gc::space::ImageSpace * space)2093 static void VerifyAppImage(const ImageHeader& header,
2094                            const Handle<mirror::ClassLoader>& class_loader,
2095                            ClassTable* class_table,
2096                            gc::space::ImageSpace* space)
2097     REQUIRES_SHARED(Locks::mutator_lock_) {
2098   header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2099     ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
2100     if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
2101       CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
2102           << mirror::Class::PrettyClass(klass);
2103     }
2104   }, space->Begin(), kRuntimePointerSize);
2105   {
2106     // Verify that all direct interfaces of classes in the class table are also resolved.
2107     std::vector<ObjPtr<mirror::Class>> classes;
2108     auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
2109         REQUIRES_SHARED(Locks::mutator_lock_) {
2110       if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
2111         classes.push_back(klass);
2112       }
2113       return true;
2114     };
2115     class_table->Visit(verify_direct_interfaces_in_table);
2116     for (ObjPtr<mirror::Class> klass : classes) {
2117       for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
2118         CHECK(klass->GetDirectInterface(i) != nullptr)
2119             << klass->PrettyDescriptor() << " iface #" << i;
2120       }
2121     }
2122   }
2123 }
2124 
AddImageSpace(gc::space::ImageSpace * space,Handle<mirror::ClassLoader> class_loader,ClassLoaderContext * context,const std::vector<std::unique_ptr<const DexFile>> & dex_files,std::string * error_msg)2125 bool ClassLinker::AddImageSpace(gc::space::ImageSpace* space,
2126                                 Handle<mirror::ClassLoader> class_loader,
2127                                 ClassLoaderContext* context,
2128                                 const std::vector<std::unique_ptr<const DexFile>>& dex_files,
2129                                 std::string* error_msg) {
2130   DCHECK(error_msg != nullptr);
2131   const uint64_t start_time = NanoTime();
2132   const bool app_image = class_loader != nullptr;
2133   const ImageHeader& header = space->GetImageHeader();
2134   ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
2135   DCHECK(dex_caches_object != nullptr);
2136   Runtime* const runtime = Runtime::Current();
2137   gc::Heap* const heap = runtime->GetHeap();
2138   Thread* const self = Thread::Current();
2139   // Check that the image is what we are expecting.
2140   if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
2141     *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
2142                               static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
2143                               static_cast<size_t>(image_pointer_size_));
2144     return false;
2145   }
2146   size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
2147   if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
2148     *error_msg = StringPrintf("Expected %zu image roots but got %d",
2149                               expected_image_roots,
2150                               header.GetImageRoots()->GetLength());
2151     return false;
2152   }
2153   StackHandleScope<3> hs(self);
2154   Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
2155       hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
2156   Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
2157       header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
2158   MutableHandle<mirror::Object> special_root(hs.NewHandle(
2159       app_image ? header.GetImageRoot(ImageHeader::kSpecialRoots) : nullptr));
2160   DCHECK(class_roots != nullptr);
2161   if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
2162     *error_msg = StringPrintf("Expected %d class roots but got %d",
2163                               class_roots->GetLength(),
2164                               static_cast<int32_t>(ClassRoot::kMax));
2165     return false;
2166   }
2167   // Check against existing class roots to make sure they match the ones in the boot image.
2168   ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
2169   for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
2170     if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
2171       *error_msg = "App image class roots must have pointer equality with runtime ones.";
2172       return false;
2173     }
2174   }
2175   const OatFile* oat_file = space->GetOatFile();
2176 
2177   if (app_image) {
2178     ScopedAssertNoThreadSuspension sants("Checking app image");
2179     if (special_root == nullptr) {
2180       *error_msg = "Unexpected null special root in app image";
2181       return false;
2182     } else if (special_root->IsByteArray()) {
2183       OatHeader* oat_header = reinterpret_cast<OatHeader*>(special_root->AsByteArray()->GetData());
2184       if (!oat_header->IsValid()) {
2185         *error_msg = "Invalid oat header in special root";
2186         return false;
2187       }
2188       if (oat_file->GetVdexFile()->GetNumberOfDexFiles() != oat_header->GetDexFileCount()) {
2189         *error_msg = "Checksums count does not match";
2190         return false;
2191       }
2192       if (oat_header->IsConcurrentCopying() != gUseReadBarrier) {
2193         *error_msg = "GCs do not match";
2194         return false;
2195       }
2196 
2197       // Check if the dex checksums match the dex files that we just loaded.
2198       uint32_t* checksums = reinterpret_cast<uint32_t*>(
2199           reinterpret_cast<uint8_t*>(oat_header) + oat_header->GetHeaderSize());
2200       for (uint32_t i = 0; i  < oat_header->GetDexFileCount(); ++i) {
2201         uint32_t dex_checksum = dex_files.at(i)->GetHeader().checksum_;
2202         if (checksums[i] != dex_checksum) {
2203           *error_msg = StringPrintf(
2204               "Image and dex file checksums did not match for %s: image has %d, dex file has %d",
2205               dex_files.at(i)->GetLocation().c_str(),
2206               checksums[i],
2207               dex_checksum);
2208           return false;
2209         }
2210       }
2211 
2212       // Validate the class loader context.
2213       const char* stored_context = oat_header->GetStoreValueByKey(OatHeader::kClassPathKey);
2214       if (stored_context == nullptr) {
2215         *error_msg = "Missing class loader context in special root";
2216         return false;
2217       }
2218       if (context->VerifyClassLoaderContextMatch(stored_context) ==
2219               ClassLoaderContext::VerificationResult::kMismatch) {
2220         *error_msg = StringPrintf("Class loader contexts don't match: %s", stored_context);
2221         return false;
2222       }
2223 
2224       // Validate the apex versions.
2225       if (!gc::space::ImageSpace::ValidateApexVersions(*oat_header,
2226                                                        runtime->GetApexVersions(),
2227                                                        space->GetImageLocation(),
2228                                                        error_msg)) {
2229         return false;
2230       }
2231 
2232       // Validate the boot classpath.
2233       const char* bcp = oat_header->GetStoreValueByKey(OatHeader::kBootClassPathKey);
2234       if (bcp == nullptr) {
2235         *error_msg = "Missing boot classpath in special root";
2236         return false;
2237       }
2238       std::string runtime_bcp = android::base::Join(runtime->GetBootClassPathLocations(), ':');
2239       if (strcmp(bcp, runtime_bcp.c_str()) != 0) {
2240         *error_msg = StringPrintf("Mismatch boot classpath: image has %s, runtime has %s",
2241                                   bcp,
2242                                   runtime_bcp.c_str());
2243         return false;
2244       }
2245 
2246       // Validate the dex checksums of the boot classpath.
2247       const char* bcp_checksums =
2248           oat_header->GetStoreValueByKey(OatHeader::kBootClassPathChecksumsKey);
2249       if (bcp_checksums == nullptr) {
2250         *error_msg = "Missing boot classpath checksums in special root";
2251         return false;
2252       }
2253       if (strcmp(bcp_checksums, runtime->GetBootClassPathChecksums().c_str()) != 0) {
2254         *error_msg = StringPrintf("Mismatch boot classpath checksums: image has %s, runtime has %s",
2255                                   bcp_checksums,
2256                                   runtime->GetBootClassPathChecksums().c_str());
2257         return false;
2258       }
2259     } else if (IsBootClassLoader(special_root.Get())) {
2260       *error_msg = "Unexpected BootClassLoader in app image";
2261       return false;
2262     } else if (!special_root->IsClassLoader()) {
2263       *error_msg = "Unexpected special root in app image";
2264       return false;
2265     }
2266   }
2267 
2268   if (kCheckImageObjects) {
2269     if (!app_image) {
2270       if (image_pointer_size_ == PointerSize::k64) {
2271         ImageChecker<PointerSize::k64>::CheckObjects(heap, space);
2272       } else {
2273         ImageChecker<PointerSize::k32>::CheckObjects(heap, space);
2274       }
2275     }
2276   }
2277 
2278   // Set entry point to interpreter if in InterpretOnly mode.
2279   if (!runtime->IsAotCompiler() &&
2280       (runtime->GetInstrumentation()->InterpretOnly() ||
2281        runtime->IsJavaDebuggable())) {
2282     // Set image methods' entry point to interpreter.
2283     header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2284       if (!method.IsRuntimeMethod()) {
2285         DCHECK(method.GetDeclaringClass() != nullptr);
2286         if (!method.IsNative() && !method.IsResolutionMethod()) {
2287           method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
2288                                                             image_pointer_size_);
2289         }
2290       }
2291     }, space->Begin(), image_pointer_size_);
2292   }
2293 
2294   if (!runtime->IsAotCompiler()) {
2295     // If the boot image is not loaded by the zygote, we don't need the shared
2296     // memory optimization.
2297     // If we are profiling the boot classpath, we disable the shared memory
2298     // optimization to make sure boot classpath methods all get properly
2299     // profiled.
2300     // For debuggable runtimes we don't use AOT code, so don't use shared memory
2301     // optimization so the methods can be JITed better.
2302     //
2303     // We need to disable the flag before doing ResetCounter below, as counters
2304     // of shared memory method always hold the "hot" value.
2305     if (!runtime->IsZygote() ||
2306         runtime->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath() ||
2307         runtime->IsJavaDebuggable()) {
2308       header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2309         method.ClearMemorySharedMethod();
2310       }, space->Begin(), image_pointer_size_);
2311     }
2312 
2313     ScopedTrace trace("AppImage:UpdateCodeItemAndNterp");
2314     bool can_use_nterp = interpreter::CanRuntimeUseNterp();
2315     uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
2316     header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2317       // In the image, the `data` pointer field of the ArtMethod contains the code
2318       // item offset. Change this to the actual pointer to the code item.
2319       if (method.HasCodeItem()) {
2320         const dex::CodeItem* code_item = method.GetDexFile()->GetCodeItem(
2321             reinterpret_cast32<uint32_t>(method.GetDataPtrSize(image_pointer_size_)));
2322         method.SetCodeItem(code_item, method.GetDexFile()->IsCompactDexFile());
2323         // The hotness counter may have changed since we compiled the image, so
2324         // reset it with the runtime value.
2325         method.ResetCounter(hotness_threshold);
2326       }
2327       if (method.GetEntryPointFromQuickCompiledCode() == nterp_trampoline_) {
2328         if (can_use_nterp) {
2329           // Set image methods' entry point that point to the nterp trampoline to the
2330           // nterp entry point. This allows taking the fast path when doing a
2331           // nterp->nterp call.
2332           DCHECK(!method.StillNeedsClinitCheck());
2333           method.SetEntryPointFromQuickCompiledCode(interpreter::GetNterpEntryPoint());
2334         } else {
2335           method.SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
2336         }
2337       }
2338     }, space->Begin(), image_pointer_size_);
2339   }
2340 
2341   if (runtime->IsVerificationSoftFail()) {
2342     header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2343       if (method.IsManagedAndInvokable()) {
2344         method.ClearSkipAccessChecks();
2345       }
2346     }, space->Begin(), image_pointer_size_);
2347   }
2348 
2349   ClassTable* class_table = nullptr;
2350   {
2351     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2352     class_table = InsertClassTableForClassLoader(class_loader.Get());
2353   }
2354   // If we have a class table section, read it and use it for verification in
2355   // UpdateAppImageClassLoadersAndDexCaches.
2356   ClassTable::ClassSet temp_set;
2357   const ImageSection& class_table_section = header.GetClassTableSection();
2358   const bool added_class_table = class_table_section.Size() > 0u;
2359   if (added_class_table) {
2360     const uint64_t start_time2 = NanoTime();
2361     size_t read_count = 0;
2362     temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
2363                                     /*make copy*/false,
2364                                     &read_count);
2365     VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
2366   }
2367   if (app_image) {
2368     AppImageLoadingHelper::Update(this, space, class_loader, dex_caches);
2369 
2370     {
2371       ScopedTrace trace("AppImage:UpdateClassLoaders");
2372       // Update class loader and resolved strings. If added_class_table is false, the resolved
2373       // strings were forwarded UpdateAppImageClassLoadersAndDexCaches.
2374       ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
2375       for (const ClassTable::TableSlot& root : temp_set) {
2376         // Note: We probably don't need the read barrier unless we copy the app image objects into
2377         // the region space.
2378         ObjPtr<mirror::Class> klass(root.Read());
2379         // Do not update class loader for boot image classes where the app image
2380         // class loader is only the initiating loader but not the defining loader.
2381         if (space->HasAddress(klass.Ptr())) {
2382           klass->SetClassLoader(loader);
2383         } else {
2384           DCHECK(klass->IsBootStrapClassLoaded());
2385           DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass.Ptr()));
2386         }
2387       }
2388     }
2389 
2390     if (kBitstringSubtypeCheckEnabled) {
2391       // Every class in the app image has initially SubtypeCheckInfo in the
2392       // Uninitialized state.
2393       //
2394       // The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least Initialized
2395       // after class initialization is complete. The app image ClassStatus as-is
2396       // are almost all ClassStatus::Initialized, and being in the
2397       // SubtypeCheckInfo::kUninitialized state is violating that invariant.
2398       //
2399       // Force every app image class's SubtypeCheck to be at least kIninitialized.
2400       //
2401       // See also ImageWriter::FixupClass.
2402       ScopedTrace trace("AppImage:RecacluateSubtypeCheckBitstrings");
2403       MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2404       for (const ClassTable::TableSlot& root : temp_set) {
2405         SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
2406       }
2407     }
2408   }
2409   if (!oat_file->GetBssGcRoots().empty()) {
2410     // Insert oat file to class table for visiting .bss GC roots.
2411     class_table->InsertOatFile(oat_file);
2412   }
2413 
2414   if (added_class_table) {
2415     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2416     class_table->AddClassSet(std::move(temp_set));
2417   }
2418 
2419   if (kIsDebugBuild && app_image) {
2420     // This verification needs to happen after the classes have been added to the class loader.
2421     // Since it ensures classes are in the class table.
2422     ScopedTrace trace("AppImage:Verify");
2423     VerifyAppImage(header, class_loader, class_table, space);
2424   }
2425 
2426   VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
2427   return true;
2428 }
2429 
AddImageSpaces(ArrayRef<gc::space::ImageSpace * > spaces,Handle<mirror::ClassLoader> class_loader,ClassLoaderContext * context,std::vector<std::unique_ptr<const DexFile>> * dex_files,std::string * error_msg)2430 bool ClassLinker::AddImageSpaces(ArrayRef<gc::space::ImageSpace*> spaces,
2431                                  Handle<mirror::ClassLoader> class_loader,
2432                                  ClassLoaderContext* context,
2433                                  /*out*/ std::vector<std::unique_ptr<const DexFile>>* dex_files,
2434                                  /*out*/ std::string* error_msg) {
2435   std::vector<std::vector<std::unique_ptr<const DexFile>>> dex_files_by_space_index;
2436   for (const gc::space::ImageSpace* space : spaces) {
2437     std::vector<std::unique_ptr<const DexFile>> space_dex_files;
2438     if (!OpenAndInitImageDexFiles(space, class_loader, /*out*/ &space_dex_files, error_msg)) {
2439       return false;
2440     }
2441     dex_files_by_space_index.push_back(std::move(space_dex_files));
2442   }
2443   // This must be done in a separate loop after all dex files are initialized because there can be
2444   // references from an image space to another image space that comes after it.
2445   for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
2446     std::vector<std::unique_ptr<const DexFile>>& space_dex_files = dex_files_by_space_index[i];
2447     if (!AddImageSpace(spaces[i], class_loader, context, space_dex_files, error_msg)) {
2448       return false;
2449     }
2450     // Append opened dex files at the end.
2451     std::move(space_dex_files.begin(), space_dex_files.end(), std::back_inserter(*dex_files));
2452   }
2453   return true;
2454 }
2455 
// Visits GC roots held by the class linker: the boot class table, class loader
// weak roots, and (for non-CC collectors) the logs of roots recorded since
// root logging was enabled. The exact subset visited is selected by `flags`.
void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
  // Acquire tracing_enabled before locking class linker lock to prevent lock order violation. Since
  // enabling tracing requires the mutator lock, there are no race conditions here.
  const bool tracing_enabled = Trace::IsTracingEnabled();
  Thread* const self = Thread::Current();
  WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
  if (gUseReadBarrier) {
    // We do not track new roots for CC.
    DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
                          kVisitRootFlagClearRootLog |
                          kVisitRootFlagStartLoggingNewRoots |
                          kVisitRootFlagStopLoggingNewRoots));
  }
  if ((flags & kVisitRootFlagAllRoots) != 0) {
    // Argument for how root visiting deals with ArtField and ArtMethod roots.
    // There is 3 GC cases to handle:
    // Non moving concurrent:
    // This case is easy to handle since the reference members of ArtMethod and ArtFields are held
    // live by the class and class roots.
    //
    // Moving non-concurrent:
    // This case needs to call visit VisitNativeRoots in case the classes or dex cache arrays move.
    // To prevent missing roots, this case needs to ensure that there is no
    // suspend points between the point which we allocate ArtMethod arrays and place them in a
    // class which is in the class table.
    //
    // Moving concurrent:
    // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
    // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
    //
    // Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
    // ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
    // these objects.
    UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
    boot_class_table_->VisitRoots(root_visitor);
    // If tracing is enabled, then mark all the class loaders to prevent unloading.
    if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
      for (const ClassLoaderData& data : class_loaders_) {
        GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
        root.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
      }
    }
  } else if (!gUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
    // Only visit the roots recorded since logging was enabled (non-CC collectors).
    for (auto& root : new_roots_) {
      ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
      root.VisitRoot(visitor, RootInfo(kRootStickyClass));
      ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
      // Concurrent moving GC marked new roots through the to-space invariant.
      DCHECK_EQ(new_ref, old_ref);
    }
    // Also visit the .bss GC roots of boot oat files registered since logging started.
    for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
      for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
        ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
        if (old_ref != nullptr) {
          DCHECK(old_ref->IsClass() || old_ref->IsString());
          root.VisitRoot(visitor, RootInfo(kRootStickyClass));
          ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
          // Concurrent moving GC marked new roots through the to-space invariant.
          DCHECK_EQ(new_ref, old_ref);
        }
      }
    }
  }
  if (!gUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
    new_roots_.clear();
    new_bss_roots_boot_oat_files_.clear();
  }
  if (!gUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
    log_new_roots_ = true;
  } else if (!gUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
    log_new_roots_ = false;
  }
  // We deliberately ignore the class roots in the image since we
  // handle image roots by using the MS/CMS rescanning of dirty cards.
}
2531 
2532 // Keep in sync with InitCallback. Anything we visit, we need to
2533 // reinit references to when reinitializing a ClassLinker from a
2534 // mapped image.
void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags, bool visit_class_roots) {
  // Visit the array holding the primordial class roots (java.lang.Class etc.).
  class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
  if (visit_class_roots) {
    VisitClassRoots(visitor, flags);
  }
  // Instead of visiting the find_array_class_cache_ drop it so that it doesn't prevent class
  // unloading if we are marking roots.
  DropFindArrayClassCache();
}
2544 
2545 class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
2546  public:
VisitClassLoaderClassesVisitor(ClassVisitor * visitor)2547   explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
2548       : visitor_(visitor),
2549         done_(false) {}
2550 
Visit(ObjPtr<mirror::ClassLoader> class_loader)2551   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
2552       REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
2553     ClassTable* const class_table = class_loader->GetClassTable();
2554     if (!done_ && class_table != nullptr) {
2555       DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
2556       if (!class_table->Visit(visitor)) {
2557         // If the visitor ClassTable returns false it means that we don't need to continue.
2558         done_ = true;
2559       }
2560     }
2561   }
2562 
2563  private:
2564   // Class visitor that limits the class visits from a ClassTable to the classes with
2565   // the provided defining class loader. This filter is used to avoid multiple visits
2566   // of the same class which can be recorded for multiple initiating class loaders.
2567   class DefiningClassLoaderFilterVisitor : public ClassVisitor {
2568    public:
DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,ClassVisitor * visitor)2569     DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
2570                                      ClassVisitor* visitor)
2571         : defining_class_loader_(defining_class_loader), visitor_(visitor) { }
2572 
operator ()(ObjPtr<mirror::Class> klass)2573     bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
2574       if (klass->GetClassLoader() != defining_class_loader_) {
2575         return true;
2576       }
2577       return (*visitor_)(klass);
2578     }
2579 
2580     const ObjPtr<mirror::ClassLoader> defining_class_loader_;
2581     ClassVisitor* const visitor_;
2582   };
2583 
2584   ClassVisitor* const visitor_;
2585   // If done is true then we don't need to do any more visiting.
2586   bool done_;
2587 };
2588 
VisitClassesInternal(ClassVisitor * visitor)2589 void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
2590   if (boot_class_table_->Visit(*visitor)) {
2591     VisitClassLoaderClassesVisitor loader_visitor(visitor);
2592     VisitClassLoaders(&loader_visitor);
2593   }
2594 }
2595 
VisitClasses(ClassVisitor * visitor)2596 void ClassLinker::VisitClasses(ClassVisitor* visitor) {
2597   Thread* const self = Thread::Current();
2598   ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
2599   // Not safe to have thread suspension when we are holding a lock.
2600   if (self != nullptr) {
2601     ScopedAssertNoThreadSuspension nts(__FUNCTION__);
2602     VisitClassesInternal(visitor);
2603   } else {
2604     VisitClassesInternal(visitor);
2605   }
2606 }
2607 
2608 class GetClassesInToVector : public ClassVisitor {
2609  public:
operator ()(ObjPtr<mirror::Class> klass)2610   bool operator()(ObjPtr<mirror::Class> klass) override {
2611     classes_.push_back(klass);
2612     return true;
2613   }
2614   std::vector<ObjPtr<mirror::Class>> classes_;
2615 };
2616 
2617 class GetClassInToObjectArray : public ClassVisitor {
2618  public:
GetClassInToObjectArray(mirror::ObjectArray<mirror::Class> * arr)2619   explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
2620       : arr_(arr), index_(0) {}
2621 
operator ()(ObjPtr<mirror::Class> klass)2622   bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
2623     ++index_;
2624     if (index_ <= arr_->GetLength()) {
2625       arr_->Set(index_ - 1, klass);
2626       return true;
2627     }
2628     return false;
2629   }
2630 
Succeeded() const2631   bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
2632     return index_ <= arr_->GetLength();
2633   }
2634 
2635  private:
2636   mirror::ObjectArray<mirror::Class>* const arr_;
2637   int32_t index_;
2638 };
2639 
// Visits all classes without holding classlinker_classes_lock_ during the
// visitor callbacks, by first snapshotting the classes into secondary storage.
void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
  // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
  // is avoiding duplicates.
  if (!kMovingClasses) {
    // Classes cannot move: raw ObjPtrs stay valid, so a plain vector suffices.
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    GetClassesInToVector accumulator;
    VisitClasses(&accumulator);
    for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
      if (!visitor->operator()(klass)) {
        return;
      }
    }
  } else {
    // Classes may move: snapshot them into a GC-visible ObjectArray held by a handle.
    Thread* const self = Thread::Current();
    StackHandleScope<1> hs(self);
    auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
    // We size the array assuming classes won't be added to the class table during the visit.
    // If this assumption fails we iterate again.
    while (true) {
      size_t class_table_size;
      {
        ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
        // Add 100 in case new classes get loaded when we are filling in the object array.
        class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
      }
      ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
      classes.Assign(
          mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
      CHECK(classes != nullptr);  // OOME.
      GetClassInToObjectArray accumulator(classes.Get());
      VisitClasses(&accumulator);
      if (accumulator.Succeeded()) {
        // Every class fit in the array; the snapshot is complete.
        break;
      }
    }
    for (int32_t i = 0; i < classes->GetLength(); ++i) {
      // If the class table shrank during creation of the classes array we expect null elements. If
      // the class table grew then the loop repeats. If classes are created after the loop has
      // finished then we don't visit.
      ObjPtr<mirror::Class> klass = classes->Get(i);
      if (klass != nullptr && !visitor->operator()(klass)) {
        return;
      }
    }
  }
}
2686 
ClassLinker::~ClassLinker() {
  Thread* const self = Thread::Current();
  // First release per-loader external state (weak global refs, JIT methods,
  // CHA dependencies) for every loader, then free allocators and tables in a
  // second pass so no loader's data is freed while another still refers to it.
  for (const ClassLoaderData& data : class_loaders_) {
    // CHA unloading analysis is not needed. No negative consequences are expected because
    // all the classloaders are deleted at the same time.
    PrepareToDeleteClassLoader(self, data, /*cleanup_cha=*/false);
  }
  for (const ClassLoaderData& data : class_loaders_) {
    delete data.allocator;
    delete data.class_table;
  }
  class_loaders_.clear();
  // Drain the remaining visibly-initialized callbacks; wrapping each front
  // element in a unique_ptr frees it when popped (NOTE(review): assumes the
  // callbacks are heap-allocated and owned by this list — confirm at the
  // allocation site).
  while (!running_visibly_initialized_callbacks_.empty()) {
    std::unique_ptr<VisiblyInitializedCallback> callback(
        std::addressof(running_visibly_initialized_callbacks_.front()));
    running_visibly_initialized_callbacks_.pop_front();
  }
}
2705 
// Releases resources associated with a class loader that is about to be
// deleted: its weak global root, JIT code / CHA dependencies tied to its
// linear allocator, and cached critical-native stubs for its methods.
void ClassLinker::PrepareToDeleteClassLoader(Thread* self,
                                             const ClassLoaderData& data,
                                             bool cleanup_cha) {
  Runtime* const runtime = Runtime::Current();
  JavaVMExt* const vm = runtime->GetJavaVM();
  vm->DeleteWeakGlobalRef(self, data.weak_root);
  // Notify the JIT that we need to remove the methods and/or profiling info.
  if (runtime->GetJit() != nullptr) {
    jit::JitCodeCache* code_cache = runtime->GetJit()->GetCodeCache();
    if (code_cache != nullptr) {
      // For the JIT case, RemoveMethodsIn removes the CHA dependencies.
      code_cache->RemoveMethodsIn(self, *data.allocator);
    }
  } else if (cha_ != nullptr) {
    // If we don't have a JIT, we need to manually remove the CHA dependencies.
    cha_->RemoveDependenciesForLinearAlloc(self, data.allocator);
  }
  // Cleanup references to single implementation ArtMethods that will be deleted.
  if (cleanup_cha) {
    CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
    data.class_table->Visit<kWithoutReadBarrier>(visitor);
  }
  {
    // Erase cached critical-native entries whose methods live in this loader's
    // linear allocator; they are about to be deleted with it.
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto end = critical_native_code_with_clinit_check_.end();
    for (auto it = critical_native_code_with_clinit_check_.begin(); it != end; ) {
      if (data.allocator->ContainsUnsafe(it->first)) {
        it = critical_native_code_with_clinit_check_.erase(it);
      } else {
        ++it;
      }
    }
  }
}
2740 
AllocPointerArray(Thread * self,size_t length)2741 ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t length) {
2742   return ObjPtr<mirror::PointerArray>::DownCast(
2743       image_pointer_size_ == PointerSize::k64
2744           ? ObjPtr<mirror::Array>(mirror::LongArray::Alloc(self, length))
2745           : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
2746 }
2747 
AllocDexCache(Thread * self,const DexFile & dex_file)2748 ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_file) {
2749   StackHandleScope<1> hs(self);
2750   auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
2751       GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
2752   if (dex_cache == nullptr) {
2753     self->AssertPendingOOMException();
2754     return nullptr;
2755   }
2756   // Use InternWeak() so that the location String can be collected when the ClassLoader
2757   // with this DexCache is collected.
2758   ObjPtr<mirror::String> location = intern_table_->InternWeak(dex_file.GetLocation().c_str());
2759   if (location == nullptr) {
2760     self->AssertPendingOOMException();
2761     return nullptr;
2762   }
2763   dex_cache->SetLocation(location);
2764   return dex_cache.Get();
2765 }
2766 
AllocAndInitializeDexCache(Thread * self,const DexFile & dex_file,ObjPtr<mirror::ClassLoader> class_loader)2767 ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(
2768     Thread* self, const DexFile& dex_file, ObjPtr<mirror::ClassLoader> class_loader) {
2769   StackHandleScope<1> hs(self);
2770   Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
2771   ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
2772   if (dex_cache != nullptr) {
2773     WriterMutexLock mu(self, *Locks::dex_lock_);
2774     dex_cache->Initialize(&dex_file, h_class_loader.Get());
2775   }
2776   return dex_cache;
2777 }
2778 
2779 template <bool kMovable, typename PreFenceVisitor>
AllocClass(Thread * self,ObjPtr<mirror::Class> java_lang_Class,uint32_t class_size,const PreFenceVisitor & pre_fence_visitor)2780 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2781                                               ObjPtr<mirror::Class> java_lang_Class,
2782                                               uint32_t class_size,
2783                                               const PreFenceVisitor& pre_fence_visitor) {
2784   DCHECK_GE(class_size, sizeof(mirror::Class));
2785   gc::Heap* heap = Runtime::Current()->GetHeap();
2786   ObjPtr<mirror::Object> k = (kMovingClasses && kMovable) ?
2787       heap->AllocObject(self, java_lang_Class, class_size, pre_fence_visitor) :
2788       heap->AllocNonMovableObject(self, java_lang_Class, class_size, pre_fence_visitor);
2789   if (UNLIKELY(k == nullptr)) {
2790     self->AssertPendingOOMException();
2791     return nullptr;
2792   }
2793   return k->AsClass();
2794 }
2795 
2796 template <bool kMovable>
AllocClass(Thread * self,ObjPtr<mirror::Class> java_lang_Class,uint32_t class_size)2797 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2798                                               ObjPtr<mirror::Class> java_lang_Class,
2799                                               uint32_t class_size) {
2800   mirror::Class::InitializeClassVisitor visitor(class_size);
2801   return AllocClass<kMovable>(self, java_lang_Class, class_size, visitor);
2802 }
2803 
AllocClass(Thread * self,uint32_t class_size)2804 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self, uint32_t class_size) {
2805   return AllocClass(self, GetClassRoot<mirror::Class>(this), class_size);
2806 }
2807 
// Allocates the array class for a primitive component type (e.g. int[]) and
// installs it as a class root. Setup is completed later by FinishArrayClassSetup.
void ClassLinker::AllocPrimitiveArrayClass(Thread* self,
                                           ClassRoot primitive_root,
                                           ClassRoot array_root) {
  // We make this class non-movable for the unlikely case where it were to be
  // moved by a sticky-bit (minor) collection when using the Generational
  // Concurrent Copying (CC) collector, potentially creating a stale reference
  // in the `klass_` field of one of its instances allocated in the Large-Object
  // Space (LOS) -- see the comment about the dirty card scanning logic in
  // art::gc::collector::ConcurrentCopying::MarkingPhase.
  ObjPtr<mirror::Class> array_class = AllocClass</* kMovable= */ false>(
      self, GetClassRoot<mirror::Class>(this), mirror::Array::ClassSize(image_pointer_size_));
  // Fetch the component type only after the allocation above, which can
  // suspend; an earlier ObjPtr could be stale.
  ObjPtr<mirror::Class> component_type = GetClassRoot(primitive_root, this);
  DCHECK(component_type->IsPrimitive());
  array_class->SetComponentType(component_type);
  SetClassRoot(array_root, array_class);
}
2824 
// Completes the setup of an array class whose component type is already set:
// superclass, vtable, flags, iftable, access flags, and final status.
void ClassLinker::FinishArrayClassSetup(ObjPtr<mirror::Class> array_class) {
  // All arrays directly subclass java.lang.Object and share its vtable.
  ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
  array_class->SetSuperClass(java_lang_Object);
  array_class->SetVTable(java_lang_Object->GetVTable());
  array_class->SetPrimitiveType(Primitive::kPrimNot);
  ObjPtr<mirror::Class> component_type = array_class->GetComponentType();
  // Encode the component size shift (at most 3 for 8-byte elements) into the class flags.
  DCHECK_LT(component_type->GetPrimitiveTypeSizeShift(), 4u);
  uint32_t class_flags =
      component_type->GetPrimitiveTypeSizeShift() << mirror::kArrayComponentSizeShiftShift;
  class_flags |= component_type->IsPrimitive()
                     ? (mirror::kClassFlagNoReferenceFields | mirror::kClassFlagPrimitiveArray)
                     : mirror::kClassFlagObjectArray;
  array_class->SetClassFlags(class_flags);
  array_class->SetClassLoader(component_type->GetClassLoader());
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kLoaded);
  array_class->PopulateEmbeddedVTable(image_pointer_size_);
  ImTable* object_imt = java_lang_Object->GetImt(image_pointer_size_);
  array_class->SetImt(object_imt, image_pointer_size_);
  DCHECK_EQ(array_class->NumMethods(), 0u);

  // don't need to set new_class->SetObjectSize(..)
  // because Object::SizeOf delegates to Array::SizeOf

  // All arrays have java/lang/Cloneable and java/io/Serializable as
  // interfaces.  We need to set that up here, so that stuff like
  // "instanceof" works right.

  // Use the single, global copies of "interfaces" and "iftable"
  // (remember not to free them for arrays).
  {
    ObjPtr<mirror::IfTable> array_iftable = GetArrayIfTable();
    CHECK(array_iftable != nullptr);
    array_class->SetIfTable(array_iftable);
  }

  // Inherit access flags from the component type.
  int access_flags = component_type->GetAccessFlags();
  // Lose any implementation detail flags; in particular, arrays aren't finalizable.
  access_flags &= kAccJavaFlagsMask;
  // Arrays can't be used as a superclass or interface, so we want to add "abstract final"
  // and remove "interface".
  access_flags |= kAccAbstract | kAccFinal;
  access_flags &= ~kAccInterface;

  array_class->SetAccessFlagsDuringLinking(access_flags);

  // Array classes are fully initialized either during single threaded startup,
  // or from a pre-fence visitor, so visibly initialized.
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
}
2875 
// Finishes setup of a core (boot) array class root and inserts it into the
// boot class table, checking no class with that descriptor already exists.
void ClassLinker::FinishCoreArrayClassSetup(ClassRoot array_root) {
  // Do not hold lock on the array class object, the initialization of
  // core array classes is done while the process is still single threaded.
  ObjPtr<mirror::Class> array_class = GetClassRoot(array_root, this);
  FinishArrayClassSetup(array_class);

  std::string descriptor;
  const char* raw_descriptor = array_class->GetDescriptor(&descriptor);
  // Pointer (not content) comparison: GetDescriptor is expected to return the
  // storage of the provided std::string for array classes.
  DCHECK(raw_descriptor == descriptor.c_str());
  size_t hash = ComputeModifiedUtf8Hash(descriptor);
  ObjPtr<mirror::Class> existing = InsertClass(descriptor, array_class, hash);
  CHECK(existing == nullptr);
}
2889 
AllocStackTraceElementArray(Thread * self,size_t length)2890 ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> ClassLinker::AllocStackTraceElementArray(
2891     Thread* self,
2892     size_t length) {
2893   return mirror::ObjectArray<mirror::StackTraceElement>::Alloc(
2894       self, GetClassRoot<mirror::ObjectArray<mirror::StackTraceElement>>(this), length);
2895 }
2896 
// Waits until `klass` is resolved (possibly by another thread) and returns the
// resolved class, or null with a pending exception if resolution failed or a
// class circularity is detected. Handles temp classes by waiting for retirement
// and re-looking up the replacement class.
ObjPtr<mirror::Class> ClassLinker::EnsureResolved(Thread* self,
                                                  std::string_view descriptor,
                                                  ObjPtr<mirror::Class> klass) {
  DCHECK(klass != nullptr);
  if (kIsDebugBuild) {
    // Poisoning can suspend; keep klass in a handle wrapper so it is updated if moved.
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h = hs.NewHandleWrapper(&klass);
    Thread::PoisonObjectPointersIfDebug();
  }

  // For temporary classes we must wait for them to be retired.
  if (init_done_ && klass->IsTemp()) {
    CHECK(!klass->IsResolved());
    if (klass->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(klass);
      return nullptr;
    }
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    ObjectLock<mirror::Class> lock(self, h_class);
    // Loop and wait for the resolving thread to retire this class.
    while (!h_class->IsRetired() && !h_class->IsErroneousUnresolved()) {
      lock.WaitIgnoringInterrupts();
    }
    if (h_class->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(h_class.Get());
      return nullptr;
    }
    CHECK(h_class->IsRetired());
    // Get the updated class from class table.
    klass = LookupClass(self, descriptor, h_class.Get()->GetClassLoader());
  }

  // Wait for the class if it has not already been linked.
  size_t index = 0;
  // Maximum number of yield iterations until we start sleeping.
  static const size_t kNumYieldIterations = 1000;
  // How long each sleep is in us.
  static const size_t kSleepDurationUS = 1000;  // 1 ms.
  while (!klass->IsResolved() && !klass->IsErroneousUnresolved()) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h_class(hs.NewHandleWrapper(&klass));
    {
      ObjectTryLock<mirror::Class> lock(self, h_class);
      // Can not use a monitor wait here since it may block when returning and deadlock if another
      // thread has locked klass.
      if (lock.Acquired()) {
        // Check for circular dependencies between classes, the lock is required for SetStatus.
        if (!h_class->IsResolved() && h_class->GetClinitThreadId() == self->GetTid()) {
          // This thread is already resolving this class higher up the stack.
          ThrowClassCircularityError(h_class.Get());
          mirror::Class::SetStatus(h_class, ClassStatus::kErrorUnresolved, self);
          return nullptr;
        }
      }
    }
    {
      // Handle wrapper deals with klass moving.
      ScopedThreadSuspension sts(self, ThreadState::kSuspended);
      // Back off: spin with sched_yield() first, then sleep 1ms per iteration.
      if (index < kNumYieldIterations) {
        sched_yield();
      } else {
        usleep(kSleepDurationUS);
      }
    }
    ++index;
  }

  if (klass->IsErroneousUnresolved()) {
    ThrowEarlierClassFailure(klass);
    return nullptr;
  }
  // Return the loaded class.  No exceptions should be pending.
  CHECK(klass->IsResolved()) << klass->PrettyClass();
  self->AssertNoPendingException();
  return klass;
}
2973 
2974 using ClassPathEntry = std::pair<const DexFile*, const dex::ClassDef*>;
2975 
2976 // Search a collection of DexFiles for a descriptor
FindInClassPath(std::string_view descriptor,size_t hash,const std::vector<const DexFile * > & class_path)2977 ClassPathEntry FindInClassPath(std::string_view descriptor,
2978                                size_t hash,
2979                                const std::vector<const DexFile*>& class_path) {
2980   for (const DexFile* dex_file : class_path) {
2981     DCHECK(dex_file != nullptr);
2982     const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*dex_file, descriptor, hash);
2983     if (dex_class_def != nullptr) {
2984       return ClassPathEntry(dex_file, dex_class_def);
2985     }
2986   }
2987   return ClassPathEntry(nullptr, nullptr);
2988 }
2989 
// Helper macro to make sure each class loader lookup call handles the case the
// class loader is not recognized, or the lookup threw an exception.
// Semantics: `call_` evaluating to false means the loader was unrecognized
// (propagate failure); a non-null `result_` means the class was found
// (propagate success); a pending exception on `thread_` aborts the search.
// Otherwise the search falls through to the next lookup.
#define RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(call_, result_, thread_) \
do {                                                                          \
  auto local_call = call_;                                                    \
  if (!local_call) {                                                          \
    return false;                                                             \
  }                                                                           \
  auto local_result = result_;                                                \
  if (local_result != nullptr) {                                              \
    return true;                                                              \
  }                                                                           \
  auto local_thread = thread_;                                                \
  if (local_thread->IsExceptionPending()) {                                   \
    /* Pending exception means there was an error other than */               \
    /* ClassNotFound that must be returned to the caller. */                  \
    return false;                                                             \
  }                                                                           \
} while (0)
3009 
FindClassInSharedLibraries(Thread * self,const char * descriptor,size_t descriptor_length,size_t hash,Handle<mirror::ClassLoader> class_loader,ObjPtr<mirror::Class> * result)3010 bool ClassLinker::FindClassInSharedLibraries(Thread* self,
3011                                              const char* descriptor,
3012                                              size_t descriptor_length,
3013                                              size_t hash,
3014                                              Handle<mirror::ClassLoader> class_loader,
3015                                              /*out*/ ObjPtr<mirror::Class>* result) {
3016   ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
3017   return FindClassInSharedLibrariesHelper(
3018       self, descriptor, descriptor_length, hash, class_loader, field, result);
3019 }
3020 
// Searches each class loader in the ClassLoader[] stored in `field` of
// `class_loader` for the class. Returns false on unrecognized loader or
// pending exception; otherwise true, with *result set if the class was found.
bool ClassLinker::FindClassInSharedLibrariesHelper(Thread* self,
                                                   const char* descriptor,
                                                   size_t descriptor_length,
                                                   size_t hash,
                                                   Handle<mirror::ClassLoader> class_loader,
                                                   ArtField* field,
                                                   /*out*/ ObjPtr<mirror::Class>* result) {
  ObjPtr<mirror::Object> raw_shared_libraries = field->GetObject(class_loader.Get());
  if (raw_shared_libraries == nullptr) {
    // No shared libraries: nothing to search, but not an error.
    return true;
  }

  StackHandleScope<2> hs(self);
  Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries(
      hs.NewHandle(raw_shared_libraries->AsObjectArray<mirror::ClassLoader>()));
  // Reuse one mutable handle across iterations; lookups below can suspend.
  MutableHandle<mirror::ClassLoader> temp_loader = hs.NewHandle<mirror::ClassLoader>(nullptr);
  for (auto loader : shared_libraries.Iterate<mirror::ClassLoader>()) {
    temp_loader.Assign(loader);
    // Returns early on unrecognized loader, found class, or pending exception.
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(
            self, descriptor, descriptor_length, hash, temp_loader, result),
        *result,
        self);
  }
  return true;
}
3047 
FindClassInSharedLibrariesAfter(Thread * self,const char * descriptor,size_t descriptor_length,size_t hash,Handle<mirror::ClassLoader> class_loader,ObjPtr<mirror::Class> * result)3048 bool ClassLinker::FindClassInSharedLibrariesAfter(Thread* self,
3049                                                   const char* descriptor,
3050                                                   size_t descriptor_length,
3051                                                   size_t hash,
3052                                                   Handle<mirror::ClassLoader> class_loader,
3053                                                   /*out*/ ObjPtr<mirror::Class>* result) {
3054   ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
3055   return FindClassInSharedLibrariesHelper(
3056       self, descriptor, descriptor_length, hash, class_loader, field, result);
3057 }
3058 
// Attempts to resolve `descriptor` through a recognized class loader chain without
// calling into Java code. Returns false when an unsupported class loader type is
// encountered anywhere in the chain; returns true (chain recognized) otherwise,
// with `*result` holding the class if found, or null if not found.
bool ClassLinker::FindClassInBaseDexClassLoader(Thread* self,
                                                const char* descriptor,
                                                size_t descriptor_length,
                                                size_t hash,
                                                Handle<mirror::ClassLoader> class_loader,
                                                /*out*/ ObjPtr<mirror::Class>* result) {
  // Termination case: boot class loader.
  if (IsBootClassLoader(class_loader.Get())) {
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, descriptor_length, hash, result),
        *result,
        self);
    return true;
  }

  if (IsPathOrDexClassLoader(class_loader) || IsInMemoryDexClassLoader(class_loader)) {
    // For regular path or dex class loader the search order is:
    //    - parent
    //    - shared libraries
    //    - class loader dex files

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    // Each step below returns early (via the macro) on an unrecognized chain,
    // a found class, or a pending exception.
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(self, descriptor, descriptor_length, hash, h_parent, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(
            self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(
            self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  if (IsDelegateLastClassLoader(class_loader)) {
    // For delegate last, the search order is:
    //    - boot class path
    //    - shared libraries
    //    - class loader dex files
    //    - parent
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, descriptor_length, hash, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(
            self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(
            self, descriptor, descriptor_length, hash, class_loader, result),
        *result,
        self);

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(self, descriptor, descriptor_length, hash, h_parent, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  // Unsupported class loader.
  *result = nullptr;
  return false;
}
3147 
3148 #undef RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION
3149 
3150 namespace {
3151 
3152 // Matches exceptions caught in DexFile.defineClass.
MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,ClassLinker * class_linker)3153 ALWAYS_INLINE bool MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,
3154                                                   ClassLinker* class_linker)
3155     REQUIRES_SHARED(Locks::mutator_lock_) {
3156   return
3157       // ClassNotFoundException.
3158       throwable->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
3159                                          class_linker))
3160       ||
3161       // NoClassDefFoundError. TODO: Reconsider this. b/130746382.
3162       throwable->InstanceOf(Runtime::Current()->GetPreAllocatedNoClassDefFoundError()->GetClass());
3163 }
3164 
3165 // Clear exceptions caught in DexFile.defineClass.
FilterDexFileCaughtExceptions(Thread * self,ClassLinker * class_linker)3166 ALWAYS_INLINE void FilterDexFileCaughtExceptions(Thread* self, ClassLinker* class_linker)
3167     REQUIRES_SHARED(Locks::mutator_lock_) {
3168   if (MatchesDexFileCaughtExceptions(self->GetException(), class_linker)) {
3169     self->ClearException();
3170   }
3171 }
3172 
3173 }  // namespace
3174 
// Finds the class in the boot class loader.
// If the class is found, `*result` is set to the resolved class; otherwise it is
// set to null. Always returns true: the boot class loader is always a recognized
// ("known") lookup. ClassNotFoundException/NoClassDefFoundError raised while
// defining the class are filtered out, mirroring DexFile.defineClass.
bool ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
                                                      const char* descriptor,
                                                      size_t descriptor_length,
                                                      size_t hash,
                                                      /*out*/ ObjPtr<mirror::Class>* result) {
  std::string_view sv_descriptor(descriptor, descriptor_length);
  ClassPathEntry pair = FindInClassPath(sv_descriptor, hash, boot_class_path_);
  if (pair.second != nullptr) {
    // The descriptor has a class def in the boot class path. Another thread may
    // already have loaded it, so check the class table first.
    ObjPtr<mirror::Class> klass = LookupClass(self, sv_descriptor, hash, nullptr);
    if (klass != nullptr) {
      *result = EnsureResolved(self, sv_descriptor, klass);
    } else {
      *result = DefineClass(self,
                            descriptor,
                            descriptor_length,
                            hash,
                            ScopedNullHandle<mirror::ClassLoader>(),
                            *pair.first,
                            *pair.second);
    }
    if (*result == nullptr) {
      // Definition/resolution failed; drop the not-found exceptions that
      // DexFile.defineClass would swallow, keep anything else pending.
      CHECK(self->IsExceptionPending()) << descriptor;
      FilterDexFileCaughtExceptions(self, this);
    }
  }
  // The boot classloader is always a known lookup.
  return true;
}
3205 
FindClassInBaseDexClassLoaderClassPath(Thread * self,const char * descriptor,size_t descriptor_length,size_t hash,Handle<mirror::ClassLoader> class_loader,ObjPtr<mirror::Class> * result)3206 bool ClassLinker::FindClassInBaseDexClassLoaderClassPath(
3207     Thread* self,
3208     const char* descriptor,
3209     size_t descriptor_length,
3210     size_t hash,
3211     Handle<mirror::ClassLoader> class_loader,
3212     /*out*/ ObjPtr<mirror::Class>* result) {
3213   DCHECK(IsPathOrDexClassLoader(class_loader) ||
3214          IsInMemoryDexClassLoader(class_loader) ||
3215          IsDelegateLastClassLoader(class_loader))
3216       << "Unexpected class loader for descriptor " << descriptor;
3217 
3218   std::string_view sv_descriptor(descriptor, descriptor_length);
3219   const DexFile* dex_file = nullptr;
3220   const dex::ClassDef* class_def = nullptr;
3221   ObjPtr<mirror::Class> ret;
3222   auto find_class_def = [&](const DexFile* cp_dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
3223     const dex::ClassDef* cp_class_def = OatDexFile::FindClassDef(*cp_dex_file, sv_descriptor, hash);
3224     if (cp_class_def != nullptr) {
3225       dex_file = cp_dex_file;
3226       class_def = cp_class_def;
3227       return false;  // Found a class definition, stop visit.
3228     }
3229     return true;  // Continue with the next DexFile.
3230   };
3231   VisitClassLoaderDexFiles(self, class_loader, find_class_def);
3232 
3233   if (class_def != nullptr) {
3234     *result =
3235         DefineClass(self, descriptor, descriptor_length, hash, class_loader, *dex_file, *class_def);
3236     if (UNLIKELY(*result == nullptr)) {
3237       CHECK(self->IsExceptionPending()) << descriptor;
3238       FilterDexFileCaughtExceptions(self, this);
3239     } else {
3240       DCHECK(!self->IsExceptionPending());
3241     }
3242   }
3243   // A BaseDexClassLoader is always a known lookup.
3244   return true;
3245 }
3246 
FindClass(Thread * self,const DexFile & dex_file,dex::TypeIndex type_index,Handle<mirror::ClassLoader> class_loader)3247 ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
3248                                              const DexFile& dex_file,
3249                                              dex::TypeIndex type_index,
3250                                              Handle<mirror::ClassLoader> class_loader) {
3251   dex::StringIndex descriptor_idx = dex_file.GetTypeId(type_index).descriptor_idx_;
3252   uint32_t utf16_length;
3253   const char* descriptor = dex_file.GetStringDataAndUtf16Length(descriptor_idx, &utf16_length);
3254   size_t descriptor_length = DexFile::Utf8Length(descriptor, utf16_length);
3255   return FindClass(self, descriptor, descriptor_length, class_loader);
3256 }
3257 
// Finds (loading and defining if necessary) the class with the given modified-UTF-8
// `descriptor` using `class_loader`. Returns the class on success; returns null with
// an exception pending on failure. This is the main class lookup entry point:
// it checks the loaded-classes table, then the boot class path, then recognized
// BaseDexClassLoader chains, and finally falls back to calling the Java-side
// ClassLoader.loadClass().
ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
                                             const char* descriptor,
                                             size_t descriptor_length,
                                             Handle<mirror::ClassLoader> class_loader) {
  DCHECK_EQ(strlen(descriptor), descriptor_length);
  DCHECK_NE(descriptor_length, 0u) << "descriptor is empty string";
  DCHECK(self != nullptr);
  self->AssertNoPendingException();
  self->PoisonObjectPointers();  // For DefineClass, CreateArrayClass, etc...
  if (descriptor_length == 1u) {
    // only the descriptors of primitive types should be 1 character long, also avoid class lookup
    // for primitive classes that aren't backed by dex files.
    return FindPrimitiveClass(descriptor[0]);
  }
  const std::string_view sv_descriptor(descriptor, descriptor_length);
  const size_t hash = ComputeModifiedUtf8Hash(sv_descriptor);
  // Find the class in the loaded classes table.
  ObjPtr<mirror::Class> klass = LookupClass(self, sv_descriptor, hash, class_loader.Get());
  if (klass != nullptr) {
    return EnsureResolved(self, sv_descriptor, klass);
  }
  // Class is not yet loaded.
  if (descriptor[0] != '[' && class_loader == nullptr) {
    // Non-array class and the boot class loader, search the boot class path.
    ClassPathEntry pair = FindInClassPath(sv_descriptor, hash, boot_class_path_);
    if (pair.second != nullptr) {
      return DefineClass(self,
                         descriptor,
                         descriptor_length,
                         hash,
                         ScopedNullHandle<mirror::ClassLoader>(),
                         *pair.first,
                         *pair.second);
    } else {
      // The boot class loader is searched ahead of the application class loader, failures are
      // expected and will be wrapped in a ClassNotFoundException. Use the pre-allocated error to
      // trigger the chaining with a proper stack trace.
      ObjPtr<mirror::Throwable> pre_allocated =
          Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
      self->SetException(pre_allocated);
      return nullptr;
    }
  }
  ObjPtr<mirror::Class> result_ptr;
  bool descriptor_equals;
  if (descriptor[0] == '[') {
    // Array classes are synthesized directly rather than loaded from dex files.
    result_ptr = CreateArrayClass(self, descriptor, descriptor_length, hash, class_loader);
    DCHECK_EQ(result_ptr == nullptr, self->IsExceptionPending());
    DCHECK(result_ptr == nullptr || result_ptr->DescriptorEquals(sv_descriptor));
    descriptor_equals = true;
  } else {
    ScopedObjectAccessUnchecked soa(self);
    // Try the fast path: walk a recognized class loader chain natively,
    // without calling into Java code.
    bool known_hierarchy = FindClassInBaseDexClassLoader(
        self, descriptor, descriptor_length, hash, class_loader, &result_ptr);
    if (result_ptr != nullptr) {
      // The chain was understood and we found the class. We still need to add the class to
      // the class table to protect from racy programs that can try and redefine the path list
      // which would change the Class<?> returned for subsequent evaluation of const-class.
      DCHECK(known_hierarchy);
      DCHECK(result_ptr->DescriptorEquals(sv_descriptor));
      descriptor_equals = true;
    } else if (!self->IsExceptionPending()) {
      // Either the chain wasn't understood or the class wasn't found.
      // If there is a pending exception we didn't clear, it is a not a ClassNotFoundException and
      // we should return it instead of silently clearing and retrying.
      //
      // If the chain was understood but we did not find the class, let the Java-side
      // rediscover all this and throw the exception with the right stack trace. Note that
      // the Java-side could still succeed for racy programs if another thread is actively
      // modifying the class loader's path list.

      // The runtime is not allowed to call into java from a runtime-thread so just abort.
      if (self->IsRuntimeThread()) {
        // Oops, we can't call into java so we can't run actual class-loader code.
        // This is true for e.g. for the compiler (jit or aot).
        ObjPtr<mirror::Throwable> pre_allocated =
            Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
        self->SetException(pre_allocated);
        return nullptr;
      }

      // Inlined DescriptorToDot(descriptor) with extra validation.
      //
      // Throw NoClassDefFoundError early rather than potentially load a class only to fail
      // the DescriptorEquals() check below and give a confusing error message. For example,
      // when native code erroneously calls JNI GetFieldId() with signature "java/lang/String"
      // instead of "Ljava/lang/String;", the message below using the "dot" names would be
      // "class loader [...] returned class java.lang.String instead of java.lang.String".
      if (UNLIKELY(descriptor[0] != 'L') ||
          UNLIKELY(descriptor[descriptor_length - 1] != ';') ||
          UNLIKELY(memchr(descriptor + 1, '.', descriptor_length - 2) != nullptr)) {
        ThrowNoClassDefFoundError("Invalid descriptor: %s.", descriptor);
        return nullptr;
      }

      // Convert "Ljava/lang/String;" to "java.lang.String" for loadClass().
      std::string class_name_string(sv_descriptor.substr(1u, descriptor_length - 2u));
      std::replace(class_name_string.begin(), class_name_string.end(), '/', '.');
      if (known_hierarchy &&
          fast_class_not_found_exceptions_ &&
          !Runtime::Current()->IsJavaDebuggable()) {
        // For known hierarchy, we know that the class is going to throw an exception. If we aren't
        // debuggable, optimize this path by throwing directly here without going back to Java
        // language. This reduces how many ClassNotFoundExceptions happen.
        self->ThrowNewExceptionF("Ljava/lang/ClassNotFoundException;",
                                 "%s",
                                 class_name_string.c_str());
      } else {
        // Slow path: call the Java-side ClassLoader.loadClass().
        StackHandleScope<1u> hs(self);
        Handle<mirror::String> class_name_object = hs.NewHandle(
            mirror::String::AllocFromModifiedUtf8(self, class_name_string.c_str()));
        if (class_name_object == nullptr) {
          DCHECK(self->IsExceptionPending());  // OOME.
          return nullptr;
        }
        DCHECK(class_loader != nullptr);
        result_ptr = ObjPtr<mirror::Class>::DownCast(
            WellKnownClasses::java_lang_ClassLoader_loadClass->InvokeVirtual<'L', 'L'>(
                self, class_loader.Get(), class_name_object.Get()));
        if (result_ptr == nullptr && !self->IsExceptionPending()) {
          // broken loader - throw NPE to be compatible with Dalvik
          ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
                                                 class_name_string.c_str()).c_str());
          return nullptr;
        }
        // Check the name of the returned class.
        descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(sv_descriptor);
      }
    } else {
      DCHECK(!MatchesDexFileCaughtExceptions(self->GetException(), this));
    }
  }

  if (self->IsExceptionPending()) {
    // If the ClassLoader threw or array class allocation failed, pass that exception up.
    // However, to comply with the RI behavior, first check if another thread succeeded.
    result_ptr = LookupClass(self, sv_descriptor, hash, class_loader.Get());
    if (result_ptr != nullptr && !result_ptr->IsErroneous()) {
      self->ClearException();
      return EnsureResolved(self, sv_descriptor, result_ptr);
    }
    return nullptr;
  }

  // Try to insert the class to the class table, checking for mismatch.
  ObjPtr<mirror::Class> old;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
    old = class_table->Lookup(sv_descriptor, hash);
    if (old == nullptr) {
      old = result_ptr;  // For the comparison below, after releasing the lock.
      if (descriptor_equals) {
        class_table->InsertWithHash(result_ptr, hash);
        WriteBarrier::ForEveryFieldWrite(class_loader.Get());
      }  // else throw below, after releasing the lock.
    }
  }
  if (UNLIKELY(old != result_ptr)) {
    // Return `old` (even if `!descriptor_equals`) to mimic the RI behavior for parallel
    // capable class loaders.  (All class loaders are considered parallel capable on Android.)
    ObjPtr<mirror::Class> loader_class = class_loader->GetClass();
    const char* loader_class_name =
        loader_class->GetDexFile().GetTypeDescriptor(loader_class->GetDexTypeIndex());
    LOG(WARNING) << "Initiating class loader of type " << DescriptorToDot(loader_class_name)
        << " is not well-behaved; it returned a different Class for racing loadClass(\""
        << DescriptorToDot(descriptor) << "\").";
    return EnsureResolved(self, sv_descriptor, old);
  }
  if (UNLIKELY(!descriptor_equals)) {
    // The loader returned a class whose descriptor does not match the request.
    std::string result_storage;
    const char* result_name = result_ptr->GetDescriptor(&result_storage);
    std::string loader_storage;
    const char* loader_class_name = class_loader->GetClass()->GetDescriptor(&loader_storage);
    ThrowNoClassDefFoundError(
        "Initiating class loader of type %s returned class %s instead of %s.",
        DescriptorToDot(loader_class_name).c_str(),
        DescriptorToDot(result_name).c_str(),
        DescriptorToDot(descriptor).c_str());
    return nullptr;
  }
  // Success.
  return result_ptr;
}
3441 
// Helper for maintaining DefineClass counting. We need to notify callbacks when we start/end a
// define-class and how many recursive DefineClasses we are at in order to allow for doing  things
// like pausing class definition.
// Every exit path of DefineClass must go through one of the Finish() overloads:
// the destructor CHECKs that Finish() was called exactly once.
struct ScopedDefiningClass {
 public:
  // Notifies callbacks that a define-class has started and bumps the thread's
  // recursive define-class count.
  explicit ScopedDefiningClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
      : self_(self), returned_(false) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
    Runtime::Current()->GetRuntimeCallbacks()->BeginDefineClass();
    self_->IncrDefineClassCount();
  }
  ~ScopedDefiningClass() REQUIRES_SHARED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
    // Enforce that one of the Finish() overloads was used on every exit path.
    CHECK(returned_);
  }

  // Ends the define-class scope and passes `h_klass` through as the result.
  ObjPtr<mirror::Class> Finish(Handle<mirror::Class> h_klass)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK(!returned_);
    self_->DecrDefineClassCount();
    Runtime::Current()->GetRuntimeCallbacks()->EndDefineClass();
    Thread::PoisonObjectPointersIfDebug();
    returned_ = true;
    return h_klass.Get();
  }

  // Convenience overload: wraps the raw class pointer in a handle first, since
  // EndDefineClass callbacks may cause thread suspension.
  ObjPtr<mirror::Class> Finish(ObjPtr<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    StackHandleScope<1> hs(self_);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    return Finish(h_klass);
  }

  // Convenience overload for `return sdc.Finish(nullptr);` failure paths.
  ObjPtr<mirror::Class> Finish([[maybe_unused]] nullptr_t np)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ScopedNullHandle<mirror::Class> snh;
    return Finish(snh);
  }

 private:
  Thread* self_;    // Thread performing the class definition.
  bool returned_;   // Set by Finish(); checked by the destructor.
};
3485 
// Defines (allocates, loads, inserts and links) the class described by
// `dex_class_def` in `dex_file` under `class_loader`. Returns the resolved
// class, or null with an exception pending on failure. If another thread
// concurrently defines the same class, returns that thread's result instead.
ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
                                               const char* descriptor,
                                               size_t descriptor_length,
                                               size_t hash,
                                               Handle<mirror::ClassLoader> class_loader,
                                               const DexFile& dex_file,
                                               const dex::ClassDef& dex_class_def) {
  std::string_view sv_descriptor(descriptor, descriptor_length);
  ScopedDefiningClass sdc(self);
  StackHandleScope<3> hs(self);
  metrics::AutoTimer timer{GetMetrics()->ClassLoadingTotalTime()};
  metrics::AutoTimer timeDelta{GetMetrics()->ClassLoadingTotalTimeDelta()};
  auto klass = hs.NewHandle<mirror::Class>(nullptr);

  // Load the class from the dex file.
  if (UNLIKELY(!init_done_)) {
    // finish up init of hand crafted class_roots_
    if (sv_descriptor == "Ljava/lang/Object;") {
      klass.Assign(GetClassRoot<mirror::Object>(this));
    } else if (sv_descriptor == "Ljava/lang/Class;") {
      klass.Assign(GetClassRoot<mirror::Class>(this));
    } else if (sv_descriptor == "Ljava/lang/String;") {
      klass.Assign(GetClassRoot<mirror::String>(this));
    } else if (sv_descriptor == "Ljava/lang/ref/Reference;") {
      klass.Assign(GetClassRoot<mirror::Reference>(this));
    } else if (sv_descriptor == "Ljava/lang/DexCache;") {
      klass.Assign(GetClassRoot<mirror::DexCache>(this));
    } else if (sv_descriptor == "Ldalvik/system/ClassExt;") {
      klass.Assign(GetClassRoot<mirror::ClassExt>(this));
    }
  }

  // For AOT-compilation of an app, we may use only a public SDK to resolve symbols. If the SDK
  // checks are configured (a non null SdkChecker) and the descriptor is not in the provided
  // public class path then we prevent the definition of the class.
  //
  // NOTE that we only do the checks for the boot classpath APIs. Anything else, like the app
  // classpath is not checked.
  if (class_loader == nullptr &&
      Runtime::Current()->IsAotCompiler() &&
      DenyAccessBasedOnPublicSdk(descriptor)) {
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return sdc.Finish(nullptr);
  }

  // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
  // code to be executed. We put it up here so we can avoid all the allocations associated with
  // creating the class. This can happen with (eg) jit threads.
  if (!self->CanLoadClasses()) {
    // Make sure we don't try to load anything, potentially causing an infinite loop.
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return sdc.Finish(nullptr);
  }

  ScopedTrace trace(descriptor);
  if (klass == nullptr) {
    // Allocate a class with the status of not ready.
    // Interface object should get the right size here. Regular class will
    // figure out the right size later and be replaced with one of the right
    // size when the class becomes resolved.
    if (CanAllocClass()) {
      klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
    } else {
      return sdc.Finish(nullptr);
    }
  }
  if (UNLIKELY(klass == nullptr)) {
    self->AssertPendingOOMException();
    return sdc.Finish(nullptr);
  }
  // Get the real dex file. This will return the input if there aren't any callbacks or they do
  // nothing.
  DexFile const* new_dex_file = nullptr;
  dex::ClassDef const* new_class_def = nullptr;
  // TODO We should ideally figure out some way to move this after we get a lock on the klass so it
  // will only be called once.
  Runtime::Current()->GetRuntimeCallbacks()->ClassPreDefine(descriptor,
                                                            klass,
                                                            class_loader,
                                                            dex_file,
                                                            dex_class_def,
                                                            &new_dex_file,
                                                            &new_class_def);
  // Check to see if an exception happened during runtime callbacks. Return if so.
  if (self->IsExceptionPending()) {
    return sdc.Finish(nullptr);
  }
  ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
  if (dex_cache == nullptr) {
    self->AssertPendingException();
    return sdc.Finish(nullptr);
  }
  klass->SetDexCache(dex_cache);
  SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());

  // Mark the string class by setting its access flag.
  if (UNLIKELY(!init_done_)) {
    if (sv_descriptor == "Ljava/lang/String;") {
      klass->SetStringClass();
    }
  }

  ObjectLock<mirror::Class> lock(self, klass);
  klass->SetClinitThreadId(self->GetTid());
  // Make sure we have a valid empty iftable even if there are errors.
  klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());

  // Add the newly loaded class to the loaded classes table.
  ObjPtr<mirror::Class> existing = InsertClass(sv_descriptor, klass.Get(), hash);
  if (existing != nullptr) {
    // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
    // this thread to block.
    return sdc.Finish(EnsureResolved(self, sv_descriptor, existing));
  }

  // Load the fields and other things after we are inserted in the table. This is so that we don't
  // end up allocating unfree-able linear alloc resources and then lose the race condition. The
  // other reason is that the field roots are only visited from the class table. So we need to be
  // inserted before we allocate / fill in these fields.
  LoadClass(self, *new_dex_file, *new_class_def, klass);
  if (self->IsExceptionPending()) {
    VLOG(class_linker) << self->GetException()->Dump();
    // An exception occured during load, set status to erroneous while holding klass' lock in case
    // notification is necessary.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }

  // Finish loading (if necessary) by finding parents
  CHECK(!klass->IsLoaded());
  if (!LoadSuperAndInterfaces(klass, *new_dex_file)) {
    // Loading failed.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }
  CHECK(klass->IsLoaded());

  // At this point the class is loaded. Publish a ClassLoad event.
  // Note: this may be a temporary class. It is a listener's responsibility to handle this.
  Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(klass);

  // Link the class (if necessary)
  CHECK(!klass->IsResolved());
  // TODO: Use fast jobjects?
  auto interfaces = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);

  MutableHandle<mirror::Class> h_new_class = hs.NewHandle<mirror::Class>(nullptr);
  if (!LinkClass(self, descriptor, klass, interfaces, &h_new_class)) {
    // Linking failed.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }
  self->AssertNoPendingException();
  CHECK(h_new_class != nullptr) << descriptor;
  CHECK(h_new_class->IsResolved()) << descriptor << " " << h_new_class->GetStatus();

  // Instrumentation may have updated entrypoints for all methods of all
  // classes. However it could not update methods of this class while we
  // were loading it. Now the class is resolved, we can update entrypoints
  // as required by instrumentation.
  if (Runtime::Current()->GetInstrumentation()->EntryExitStubsInstalled()) {
    // We must be in the kRunnable state to prevent instrumentation from
    // suspending all threads to update entrypoints while we are doing it
    // for this class.
    DCHECK_EQ(self->GetState(), ThreadState::kRunnable);
    Runtime::Current()->GetInstrumentation()->InstallStubsForClass(h_new_class.Get());
  }

  /*
   * We send CLASS_PREPARE events to the debugger from here.  The
   * definition of "preparation" is creating the static fields for a
   * class and initializing them to the standard default values, but not
   * executing any code (that comes later, during "initialization").
   *
   * We did the static preparation in LinkClass.
   *
   * The class has been prepared and resolved but possibly not yet verified
   * at this point.
   */
  Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(klass, h_new_class);

  // Notify native debugger of the new class and its layout.
  jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());

  return sdc.Finish(h_new_class);
}
3682 
SizeOfClassWithoutEmbeddedTables(const DexFile & dex_file,const dex::ClassDef & dex_class_def)3683 uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
3684                                                        const dex::ClassDef& dex_class_def) {
3685   size_t num_ref = 0;
3686   size_t num_8 = 0;
3687   size_t num_16 = 0;
3688   size_t num_32 = 0;
3689   size_t num_64 = 0;
3690   ClassAccessor accessor(dex_file, dex_class_def);
3691   // We allow duplicate definitions of the same field in a class_data_item
3692   // but ignore the repeated indexes here, b/21868015.
3693   uint32_t last_field_idx = dex::kDexNoIndex;
3694   for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
3695     uint32_t field_idx = field.GetIndex();
3696     // Ordering enforced by DexFileVerifier.
3697     DCHECK(last_field_idx == dex::kDexNoIndex || last_field_idx <= field_idx);
3698     if (UNLIKELY(field_idx == last_field_idx)) {
3699       continue;
3700     }
3701     last_field_idx = field_idx;
3702     const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
3703     const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
3704     char c = descriptor[0];
3705     switch (c) {
3706       case 'L':
3707       case '[':
3708         num_ref++;
3709         break;
3710       case 'J':
3711       case 'D':
3712         num_64++;
3713         break;
3714       case 'I':
3715       case 'F':
3716         num_32++;
3717         break;
3718       case 'S':
3719       case 'C':
3720         num_16++;
3721         break;
3722       case 'B':
3723       case 'Z':
3724         num_8++;
3725         break;
3726       default:
3727         LOG(FATAL) << "Unknown descriptor: " << c;
3728         UNREACHABLE();
3729     }
3730   }
3731   return mirror::Class::ComputeClassSize(/*has_embedded_vtable=*/false,
3732                                          /*num_vtable_entries=*/0,
3733                                          num_8,
3734                                          num_16,
3735                                          num_32,
3736                                          num_64,
3737                                          num_ref,
3738                                          /*num_ref_bitmap_entries=*/0,
3739                                          image_pointer_size_);
3740 }
3741 
// Updates the entrypoints of a visibly-initialized class' direct methods so
// that future calls no longer go through clinit-check stubs. Also flushes any
// @CriticalNative JNI entrypoints that were parked while the class was
// uninitialized.
void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> klass) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);
  DCHECK(klass->IsVisiblyInitialized()) << klass->PrettyDescriptor();
  size_t num_direct_methods = klass->NumDirectMethods();
  if (num_direct_methods == 0) {
    return;  // No direct methods => no static methods.
  }
  if (UNLIKELY(klass->IsProxyClass())) {
    // Proxy classes have no code of their own to fix up.
    return;
  }
  PointerSize pointer_size = image_pointer_size_;
  if (std::any_of(klass->GetDirectMethods(pointer_size).begin(),
                  klass->GetDirectMethods(pointer_size).end(),
                  [](const ArtMethod& m) { return m.IsCriticalNative(); })) {
    // Store registered @CriticalNative methods, if any, to JNI entrypoints.
    // Direct methods are a contiguous chunk of memory, so use the ordering of the map.
    ArtMethod* first_method = klass->GetDirectMethod(0u, pointer_size);
    ArtMethod* last_method = klass->GetDirectMethod(num_direct_methods - 1u, pointer_size);
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Walk the [first_method, last_method] key range, setting the recorded
    // native code and erasing the entries as we go.
    auto lb = critical_native_code_with_clinit_check_.lower_bound(first_method);
    while (lb != critical_native_code_with_clinit_check_.end() && lb->first <= last_method) {
      lb->first->SetEntryPointFromJni(lb->second);
      lb = critical_native_code_with_clinit_check_.erase(lb);
    }
  }
  Runtime* runtime = Runtime::Current();
  if (runtime->IsAotCompiler()) {
    // We should not update entrypoints when running the transactional
    // interpreter.
    return;
  }

  instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
  // Shared boot JNI stubs are only used when the runtime is not java-debuggable.
  bool enable_boot_jni_stub = !runtime->IsJavaDebuggable();
  for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
    ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
    if (method->NeedsClinitCheckBeforeCall()) {
      const void* quick_code = instrumentation->GetCodeForInvoke(method);
      if (method->IsNative() && IsQuickGenericJniStub(quick_code) && enable_boot_jni_stub) {
        const void* boot_jni_stub = FindBootJniStub(method);
        if (boot_jni_stub != nullptr) {
          // Use boot JNI stub if found.
          quick_code = boot_jni_stub;
        }
      }
      instrumentation->UpdateMethodsCode(method, quick_code);
    }
  }
  // Ignore virtual methods on the iterator.
}
3792 
// Does anything needed to make sure that the compiler will not generate a direct invoke to this
// method. Should only be called on non-invokable methods.
inline void EnsureThrowsInvocationError(ClassLinker* class_linker, ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(method != nullptr);
  DCHECK(!method->IsInvokable());
  // Route invocations through the quick-to-interpreter bridge so that calling
  // this non-invokable method fails with the appropriate error instead of
  // executing compiled code.
  method->SetEntryPointFromQuickCompiledCodePtrSize(
      class_linker->GetQuickToInterpreterBridgeTrampoline(),
      class_linker->GetImagePointerSize());
}
3803 
// Iterator over the compiled-code entrypoints of an OatClass, advanced in
// lock-step with the class-def method index during class loading. Yields
// null when the oat file is not executable, the class has no compiled
// methods, or the specific method was not compiled.
class ClassLinker::OatClassCodeIterator {
 public:
  explicit OatClassCodeIterator(const OatFile::OatClass& oat_class)
      : begin_(oat_class.methods_pointer_ != nullptr && oat_class.oat_file_->IsExecutable()
                   ? oat_class.oat_file_->Begin()
                   : nullptr),
        bitmap_(oat_class.bitmap_),
        current_(oat_class.methods_pointer_ != nullptr && oat_class.oat_file_->IsExecutable()
                     ? oat_class.methods_pointer_
                     : nullptr),
        method_index_(0u),
        num_methods_(oat_class.num_methods_) {
    // A bitmap is present exactly when only some of the methods are compiled.
    DCHECK_EQ(bitmap_ != nullptr, oat_class.GetType() == OatClassType::kSomeCompiled);
  }

  // Returns the code pointer for `method_index` (or null if not compiled) and
  // advances past its OatMethodOffsets entry. Must be called with strictly
  // increasing method indexes, alternating with SkipAbstract() as appropriate.
  const void* GetAndAdvance(uint32_t method_index) {
    if (kIsDebugBuild) {
      CHECK_EQ(method_index, method_index_);
      ++method_index_;
    }
    if (current_ == nullptr) {
      // We may not have a valid `num_methods_` to perform the next `DCHECK()`.
      return nullptr;
    }
    DCHECK_LT(method_index, num_methods_);
    DCHECK(begin_ != nullptr);
    if (bitmap_ == nullptr || BitVector::IsBitSet(bitmap_, method_index)) {
      // Method has compiled code; `current_` only covers compiled methods,
      // so advance it only on this branch.
      DCHECK_NE(current_->code_offset_, 0u);
      const void* result = begin_ + current_->code_offset_;
      ++current_;
      return result;
    } else {
      return nullptr;
    }
  }

  // Records (in debug builds) that `method_index` is abstract/non-invokable
  // and therefore has no OatMethodOffsets entry to consume.
  void SkipAbstract(uint32_t method_index) {
    if (kIsDebugBuild) {
      CHECK_EQ(method_index, method_index_);
      ++method_index_;
      if (current_ != nullptr) {
        CHECK_LT(method_index, num_methods_);
        CHECK(bitmap_ != nullptr);
        CHECK(!BitVector::IsBitSet(bitmap_, method_index));
      }
    }
  }

 private:
  const uint8_t* const begin_;         // Base of the executable oat file, or null.
  const uint32_t* const bitmap_;       // Which methods are compiled (kSomeCompiled only).
  const OatMethodOffsets* current_;    // Next compiled-method entry, or null.

  // Debug mode members.
  uint32_t method_index_;              // Expected next method index (debug checking).
  const uint32_t num_methods_;
};
3861 
// Sets the initial entrypoints for a freshly loaded method, consuming the
// method's slot from `occi` (AOT code from the oat file, if any). No-op when
// running as the AOT compiler.
inline void ClassLinker::LinkCode(ArtMethod* method,
                                  uint32_t class_def_method_index,
                                  /*inout*/ OatClassCodeIterator* occi) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);
  Runtime* const runtime = Runtime::Current();
  if (runtime->IsAotCompiler()) {
    // The following code only applies to a non-compiler runtime.
    return;
  }

  // Method shouldn't have already been linked.
  DCHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), nullptr);
  DCHECK(!method->GetDeclaringClass()->IsVisiblyInitialized());  // Actually ClassStatus::Idx.

  if (!method->IsInvokable()) {
    // Non-invokable (e.g. abstract) methods get an entrypoint that raises an
    // error, and consume no compiled-code slot from the iterator.
    EnsureThrowsInvocationError(this, method);
    occi->SkipAbstract(class_def_method_index);
    return;
  }

  const void* quick_code = occi->GetAndAdvance(class_def_method_index);
  if (method->IsNative() && quick_code == nullptr) {
    const void* boot_jni_stub = FindBootJniStub(method);
    if (boot_jni_stub != nullptr) {
      // Use boot JNI stub if found.
      quick_code = boot_jni_stub;
    }
  }
  // Let instrumentation pick the final entrypoint (quick code, interpreter, ...).
  runtime->GetInstrumentation()->InitializeMethodsCode(method, quick_code);

  if (method->IsNative()) {
    // Set up the dlsym lookup stub. Do not go through `UnregisterNative()`
    // as the extra processing for @CriticalNative is not needed yet.
    method->SetEntryPointFromJni(
        method->IsCriticalNative() ? GetJniDlsymLookupCriticalStub() : GetJniDlsymLookupStub());
  }
}
3899 
// Performs the first step of defining a class: fills in the basic identity of
// a freshly allocated (kNotReady) Class object — java.lang.Class pointer,
// access flags, class loader, and dex indexes — and moves it to kIdx status.
void ClassLinker::SetupClass(const DexFile& dex_file,
                             const dex::ClassDef& dex_class_def,
                             Handle<mirror::Class> klass,
                             ObjPtr<mirror::ClassLoader> class_loader) {
  CHECK(klass != nullptr);
  CHECK(klass->GetDexCache() != nullptr);
  CHECK_EQ(ClassStatus::kNotReady, klass->GetStatus());
  const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
  CHECK(descriptor != nullptr);

  klass->SetClass(GetClassRoot<mirror::Class>(this));
  uint32_t access_flags = dex_class_def.GetJavaAccessFlags();
  // Only declared Java flags may appear here; runtime-internal bits come later.
  CHECK_EQ(access_flags & ~kAccJavaFlagsMask, 0U);
  klass->SetAccessFlagsDuringLinking(access_flags);
  klass->SetClassLoader(class_loader);
  DCHECK_EQ(klass->GetPrimitiveType(), Primitive::kPrimNot);
  mirror::Class::SetStatus(klass, ClassStatus::kIdx, nullptr);

  klass->SetDexClassDefIndex(dex_file.GetIndexForClassDef(dex_class_def));
  klass->SetDexTypeIndex(dex_class_def.class_idx_);
}
3921 
AllocArtFieldArray(Thread * self,LinearAlloc * allocator,size_t length)3922 LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
3923                                                                LinearAlloc* allocator,
3924                                                                size_t length) {
3925   if (length == 0) {
3926     return nullptr;
3927   }
3928   // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
3929   static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
3930   size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
3931   void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtFieldArray);
3932   auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
3933   CHECK(ret != nullptr);
3934   std::uninitialized_fill_n(&ret->At(0), length, ArtField());
3935   return ret;
3936 }
3937 
AllocArtMethodArray(Thread * self,LinearAlloc * allocator,size_t length)3938 LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
3939                                                                  LinearAlloc* allocator,
3940                                                                  size_t length) {
3941   if (length == 0) {
3942     return nullptr;
3943   }
3944   const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
3945   const size_t method_size = ArtMethod::Size(image_pointer_size_);
3946   const size_t storage_size =
3947       LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
3948   void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtMethodArray);
3949   auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
3950   CHECK(ret != nullptr);
3951   for (size_t i = 0; i < length; ++i) {
3952     new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod;
3953   }
3954   return ret;
3955 }
3956 
GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)3957 LinearAlloc* ClassLinker::GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3958   if (class_loader == nullptr) {
3959     return Runtime::Current()->GetLinearAlloc();
3960   }
3961   LinearAlloc* allocator = class_loader->GetAllocator();
3962   DCHECK(allocator != nullptr);
3963   return allocator;
3964 }
3965 
// Returns the linear allocator for `class_loader`, registering the loader
// under classlinker_classes_lock_ on first use so that its allocator gets
// created. The boot class loader (null) uses the runtime's global allocator.
LinearAlloc* ClassLinker::GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
  if (class_loader == nullptr) {
    return Runtime::Current()->GetLinearAlloc();
  }
  WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
  LinearAlloc* allocator = class_loader->GetAllocator();
  if (allocator == nullptr) {
    // First use of this loader: registering it is expected to install its
    // allocator (verified by the CHECK below).
    RegisterClassLoader(class_loader);
    allocator = class_loader->GetAllocator();
    CHECK(allocator != nullptr);
  }
  return allocator;
}
3979 
3980 // Helper class for iterating over method annotations, using their ordering in the dex file.
3981 // Since direct and virtual methods are separated (but each section is ordered), we shall use
3982 // separate iterators for loading direct and virtual methods.
3983 class ClassLinker::MethodAnnotationsIterator {
3984  public:
MethodAnnotationsIterator(const DexFile & dex_file,const dex::AnnotationsDirectoryItem * annotations_dir)3985   MethodAnnotationsIterator(const DexFile& dex_file,
3986                             const dex::AnnotationsDirectoryItem* annotations_dir)
3987       : current_((annotations_dir != nullptr) ? dex_file.GetMethodAnnotations(annotations_dir)
3988                                               : nullptr),
3989         end_((annotations_dir != nullptr) ? current_ + annotations_dir->methods_size_ : nullptr) {}
3990 
AdvanceTo(uint32_t method_idx)3991   const dex::MethodAnnotationsItem* AdvanceTo(uint32_t method_idx) {
3992     while (current_ != end_ && current_->method_idx_ < method_idx) {
3993       ++current_;
3994     }
3995     return (current_ != end_ && current_->method_idx_ == method_idx) ? current_ : nullptr;
3996   }
3997 
3998  private:
3999   const dex::MethodAnnotationsItem* current_;
4000   const dex::MethodAnnotationsItem* const end_;
4001 };
4002 
// Populates `klass` with its fields and methods from the dex class data:
// allocates the ArtField/ArtMethod arrays, loads each member, and links
// method entrypoints. Duplicate field definitions are collapsed and duplicate
// methods share the method index of their first occurrence.
void ClassLinker::LoadClass(Thread* self,
                            const DexFile& dex_file,
                            const dex::ClassDef& dex_class_def,
                            Handle<mirror::Class> klass) {
  ClassAccessor accessor(dex_file,
                         dex_class_def,
                         /* parse_hiddenapi_class_data= */ klass->IsBootStrapClassLoaded());
  if (!accessor.HasClassData()) {
    return;
  }
  Runtime* const runtime = Runtime::Current();
  {
    // Note: We cannot have thread suspension until the field and method arrays are setup or else
    // Class::VisitFieldRoots may miss some fields or methods.
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    // Load static fields.
    // We allow duplicate definitions of the same field in a class_data_item
    // but ignore the repeated indexes here, b/21868015.
    LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
    LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
                                                                allocator,
                                                                accessor.NumStaticFields());
    LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
                                                                allocator,
                                                                accessor.NumInstanceFields());
    size_t num_sfields = 0u;
    size_t num_ifields = 0u;
    uint32_t last_static_field_idx = 0u;
    uint32_t last_instance_field_idx = 0u;

    // Methods
    // Look up AOT-compiled code only in a started, non-compiler runtime.
    bool has_oat_class = false;
    const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
        ? OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class)
        : OatFile::OatClass::Invalid();
    OatClassCodeIterator occi(oat_class);
    klass->SetMethodsPtr(
        AllocArtMethodArray(self, allocator, accessor.NumMethods()),
        accessor.NumDirectMethods(),
        accessor.NumVirtualMethods());
    size_t class_def_method_index = 0;
    uint32_t last_dex_method_index = dex::kDexNoIndex;
    size_t last_class_def_method_index = 0;

    // Initialize separate `MethodAnnotationsIterator`s for direct and virtual methods.
    MethodAnnotationsIterator mai_direct(dex_file, dex_file.GetAnnotationsDirectory(dex_class_def));
    MethodAnnotationsIterator mai_virtual = mai_direct;

    uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
    // Use the visitor since the ranged based loops are bit slower from seeking. Seeking to the
    // methods needs to decode all of the fields.
    accessor.VisitFieldsAndMethods([&](
        // Static field visitor: load unless it repeats the previous index.
        const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
          uint32_t field_idx = field.GetIndex();
          DCHECK_GE(field_idx, last_static_field_idx);  // Ordering enforced by DexFileVerifier.
          if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) {
            LoadField(field, klass, &sfields->At(num_sfields));
            ++num_sfields;
            last_static_field_idx = field_idx;
          }
        }, [&](const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
          // Instance field visitor: same duplicate-skipping scheme as above.
          uint32_t field_idx = field.GetIndex();
          DCHECK_GE(field_idx, last_instance_field_idx);  // Ordering enforced by DexFileVerifier.
          if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) {
            LoadField(field, klass, &ifields->At(num_ifields));
            ++num_ifields;
            last_instance_field_idx = field_idx;
          }
        }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
          // Direct method visitor.
          ArtMethod* art_method = klass->GetDirectMethodUnchecked(class_def_method_index,
              image_pointer_size_);
          LoadMethod(dex_file, method, klass.Get(), &mai_direct, art_method);
          LinkCode(art_method, class_def_method_index, &occi);
          uint32_t it_method_index = method.GetIndex();
          if (last_dex_method_index == it_method_index) {
            // duplicate case
            art_method->SetMethodIndex(last_class_def_method_index);
          } else {
            art_method->SetMethodIndex(class_def_method_index);
            last_dex_method_index = it_method_index;
            last_class_def_method_index = class_def_method_index;
          }
          art_method->ResetCounter(hotness_threshold);
          ++class_def_method_index;
        }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
          // Virtual method visitor; `class_def_method_index` continues counting
          // past the direct methods, so subtract their count for the array slot.
          ArtMethod* art_method = klass->GetVirtualMethodUnchecked(
              class_def_method_index - accessor.NumDirectMethods(),
              image_pointer_size_);
          art_method->ResetCounter(hotness_threshold);
          LoadMethod(dex_file, method, klass.Get(), &mai_virtual, art_method);
          LinkCode(art_method, class_def_method_index, &occi);
          ++class_def_method_index;
        });

    if (UNLIKELY(num_ifields + num_sfields != accessor.NumFields())) {
      LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor()
          << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields()
          << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields()
          << ")";
      // NOTE: Not shrinking the over-allocated sfields/ifields, just setting size.
      if (sfields != nullptr) {
        sfields->SetSize(num_sfields);
      }
      if (ifields != nullptr) {
        ifields->SetSize(num_ifields);
      }
    }
    // Set the field arrays.
    klass->SetSFieldsPtr(sfields);
    DCHECK_EQ(klass->NumStaticFields(), num_sfields);
    klass->SetIFieldsPtr(ifields);
    DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
  }
  // Ensure that the card is marked so that remembered sets pick up native roots.
  WriteBarrier::ForEveryFieldWrite(klass.Get());
  self->AllowThreadSuspension();
}
4120 
// Initializes `dst` from the dex field data: dex field index, declaring
// class, and access flags (including hiddenapi runtime flags).
void ClassLinker::LoadField(const ClassAccessor::Field& field,
                            Handle<mirror::Class> klass,
                            ArtField* dst) {
  const uint32_t field_idx = field.GetIndex();
  dst->SetDexFieldIndex(field_idx);
  dst->SetDeclaringClass(klass.Get());

  // Get access flags from the DexFile and set hiddenapi runtime access flags.
  dst->SetAccessFlags(field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field));
}
4131 
// Initializes `dst` from the dex method data: index, declaring class, access
// flags (with runtime fixups: hiddenapi flags, constructor-flag repair,
// nterp fast-path bits, native-annotation flags), and the code item or JNI
// data pointer as appropriate for native/abstract/regular methods.
void ClassLinker::LoadMethod(const DexFile& dex_file,
                             const ClassAccessor::Method& method,
                             ObjPtr<mirror::Class> klass,
                             /*inout*/ MethodAnnotationsIterator* mai,
                             /*out*/ ArtMethod* dst) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  const uint32_t dex_method_idx = method.GetIndex();
  const dex::MethodId& method_id = dex_file.GetMethodId(dex_method_idx);
  uint32_t name_utf16_length;
  const char* method_name = dex_file.GetStringDataAndUtf16Length(method_id.name_idx_,
                                                                 &name_utf16_length);
  std::string_view shorty = dex_file.GetShortyView(dex_file.GetProtoId(method_id.proto_idx_));

  dst->SetDexMethodIndex(dex_method_idx);
  dst->SetDeclaringClass(klass);

  // Get access flags from the DexFile and set hiddenapi runtime access flags.
  uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method);

  // Compares `method_name` against an ASCII literal without allocating.
  // The UTF-16 length equals the byte length only for pure-ASCII names.
  auto has_ascii_name = [method_name, name_utf16_length](const char* ascii_name,
                                                         size_t length) ALWAYS_INLINE {
    DCHECK_EQ(strlen(ascii_name), length);
    return length == name_utf16_length &&
           method_name[length] == 0 &&  // Is `method_name` an ASCII string?
           memcmp(ascii_name, method_name, length) == 0;
  };
  if (UNLIKELY(has_ascii_name("finalize", sizeof("finalize") - 1u))) {
    // Set finalizable flag on declaring class if the method has the right signature.
    // When initializing without a boot image, `Object` and `Enum` shall have the finalizable
    // flag cleared immediately after loading these classes, see  `InitWithoutImage()`.
    if (shorty == "V") {
      klass->SetFinalizable();
    }
  } else if (method_name[0] == '<') {
    // Fix broken access flags for initializers. Bug 11157540.
    // `DexFileVerifier` rejects method names starting with '<' other than constructors.
    DCHECK(has_ascii_name("<init>", sizeof("<init>") - 1u) ||
           has_ascii_name("<clinit>", sizeof("<clinit>") - 1u)) << method_name;
    if (UNLIKELY((access_flags & kAccConstructor) == 0)) {
      LOG(WARNING) << method_name << " didn't have expected constructor access flag in class "
          << klass->PrettyDescriptor() << " in dex file " << dex_file.GetLocation();
      access_flags |= kAccConstructor;
    }
  }

  access_flags |= GetNterpFastPathFlags(shorty, access_flags, kRuntimeQuickCodeISA);

  if (UNLIKELY((access_flags & kAccNative) != 0u)) {
    // Check if the native method is annotated with @FastNative or @CriticalNative.
    const dex::MethodAnnotationsItem* method_annotations = mai->AdvanceTo(dex_method_idx);
    if (method_annotations != nullptr) {
      access_flags |=
          annotations::GetNativeMethodAnnotationAccessFlags(dex_file, *method_annotations);
    }
    dst->SetAccessFlags(access_flags);
    DCHECK(!dst->IsAbstract());
    DCHECK(!dst->HasCodeItem());
    DCHECK_EQ(method.GetCodeItemOffset(), 0u);
    dst->SetDataPtrSize(nullptr, image_pointer_size_);  // JNI stub/trampoline not linked yet.
  } else if ((access_flags & kAccAbstract) != 0u) {
    dst->SetAccessFlags(access_flags);
    // Must be done after SetAccessFlags since IsAbstract depends on it.
    DCHECK(dst->IsAbstract());
    if (klass->IsInterface()) {
      dst->CalculateAndSetImtIndex();
    }
    DCHECK(!dst->HasCodeItem());
    DCHECK_EQ(method.GetCodeItemOffset(), 0u);
    dst->SetDataPtrSize(nullptr, image_pointer_size_);  // Single implementation not set yet.
  } else {
    // Regular method with bytecode.
    const dex::MethodAnnotationsItem* method_annotations = mai->AdvanceTo(dex_method_idx);
    if (method_annotations != nullptr &&
        annotations::MethodIsNeverCompile(dex_file, *method_annotations)) {
      access_flags |= kAccCompileDontBother;
    }
    dst->SetAccessFlags(access_flags);
    DCHECK(!dst->IsAbstract());
    DCHECK(dst->HasCodeItem());
    uint32_t code_item_offset = method.GetCodeItemOffset();
    DCHECK_NE(code_item_offset, 0u);
    if (Runtime::Current()->IsAotCompiler()) {
      // The AOT compiler stores the raw offset; the runtime stores a pointer.
      dst->SetDataPtrSize(reinterpret_cast32<void*>(code_item_offset), image_pointer_size_);
    } else {
      dst->SetCodeItem(dex_file.GetCodeItem(code_item_offset), dex_file.IsCompactDexFile());
    }
  }

  if ((access_flags & kAccAbstract) == 0u &&
      Runtime::Current()->IsZygote() &&
      !Runtime::Current()->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath()) {
    DCHECK(!ArtMethod::IsAbstract(access_flags));
    DCHECK(!ArtMethod::IsIntrinsic(access_flags));
    dst->SetMemorySharedMethod();
    dst->SetHotCounter();
  }
}
4229 
// Appends `dex_file` to the boot class path, allocating and initializing a
// new DexCache for it (no class loader => boot class loader).
void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) {
  ObjPtr<mirror::DexCache> dex_cache =
      AllocAndInitializeDexCache(self, *dex_file, /* class_loader= */ nullptr);
  CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation();
  AppendToBootClassPath(dex_file, dex_cache);
  WriteBarrierOnClassLoader(self, /*class_loader=*/nullptr, dex_cache);
}
4237 
// Appends `dex_file` (with its pre-allocated `dex_cache`) to the boot class
// path and registers the pair under the dex lock.
void ClassLinker::AppendToBootClassPath(const DexFile* dex_file,
                                        ObjPtr<mirror::DexCache> dex_cache) {
  CHECK(dex_file != nullptr);
  CHECK(dex_cache != nullptr) << dex_file->GetLocation();
  CHECK_EQ(dex_cache->GetDexFile(), dex_file) << dex_file->GetLocation();
  boot_class_path_.push_back(dex_file);
  // Registration requires exclusive ownership of the dex lock.
  WriterMutexLock mu(Thread::Current(), *Locks::dex_lock_);
  RegisterDexFileLocked(*dex_file, dex_cache, /* class_loader= */ nullptr);
}
4247 
// Registers `dex_cache` as the canonical DexCache for `dex_file` under
// `class_loader`: performs one-time OatFile data initialization for the dex
// file's oat file, lets hiddenapi assign a domain, and records the cache in
// `dex_caches_` via a weak global reference.
// Requires: Locks::dex_lock_ held exclusively (asserted below).
void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
                                        ObjPtr<mirror::DexCache> dex_cache,
                                        ObjPtr<mirror::ClassLoader> class_loader) {
  Thread* const self = Thread::Current();
  Locks::dex_lock_->AssertExclusiveHeld(self);
  CHECK(dex_cache != nullptr) << dex_file.GetLocation();
  // The cache must have been created for exactly this dex file.
  CHECK_EQ(dex_cache->GetDexFile(), &dex_file) << dex_file.GetLocation();
  // For app images, the dex cache location may be a suffix of the dex file location since the
  // dex file location is an absolute path.
  const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
  const size_t dex_cache_length = dex_cache_location.length();
  CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
  std::string dex_file_location = dex_file.GetLocation();
  // The following paths checks don't work on preopt when using boot dex files, where the dex
  // cache location is the one on device, and the dex_file's location is the one on host.
  Runtime* runtime = Runtime::Current();
  if (!(runtime->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
    CHECK_GE(dex_file_location.length(), dex_cache_length)
        << dex_cache_location << " " << dex_file.GetLocation();
    const std::string dex_file_suffix = dex_file_location.substr(
        dex_file_location.length() - dex_cache_length,
        dex_cache_length);
    // Example dex_cache location is SettingsProvider.apk and
    // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
    CHECK_EQ(dex_cache_location, dex_file_suffix);
  }

  // Check if we need to initialize OatFile data (.data.img.rel.ro and .bss
  // sections) needed for code execution and register the oat code range.
  const OatFile* oat_file =
      (dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
  bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
  if (initialize_oat_file_data) {
    // Skip the (one-time) initialization if any live registered dex cache
    // already references the same oat file.
    for (const auto& entry : dex_caches_) {
      if (!self->IsJWeakCleared(entry.second.weak_root) &&
          entry.first->GetOatDexFile() != nullptr &&
          entry.first->GetOatDexFile()->GetOatFile() == oat_file) {
        initialize_oat_file_data = false;  // Already initialized.
        break;
      }
    }
  }
  if (initialize_oat_file_data) {
    oat_file->InitializeRelocations();
    // Notify the fault handler about the new executable code range if needed.
    size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
    DCHECK_LE(exec_offset, oat_file->Size());
    size_t exec_size = oat_file->Size() - exec_offset;
    if (exec_size != 0u) {
      runtime->AddGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
    }
  }

  // Let hiddenapi assign a domain to the newly registered dex file.
  hiddenapi::InitializeDexFileDomain(dex_file, class_loader);

  // The registry holds the cache only weakly; liveness is ensured through the
  // class table strong root inserted below.
  jweak dex_cache_jweak = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, dex_cache);
  DexCacheData data;
  data.weak_root = dex_cache_jweak;
  data.class_table = ClassTableForClassLoader(class_loader);
  AddNativeDebugInfoForDex(self, &dex_file);
  DCHECK(data.class_table != nullptr);
  // Make sure to hold the dex cache live in the class table. This case happens for the boot class
  // path dex caches without an image.
  data.class_table->InsertStrongRoot(dex_cache);
  // Make sure that the dex cache holds the classloader live.
  dex_cache->SetClassLoader(class_loader);
  if (class_loader != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(class_loader);
  }
  // A dex file may be registered at most once.
  bool inserted = dex_caches_.emplace(&dex_file, std::move(data)).second;
  CHECK(inserted);
}
4323 
DecodeDexCacheLocked(Thread * self,const DexCacheData * data)4324 ObjPtr<mirror::DexCache> ClassLinker::DecodeDexCacheLocked(Thread* self, const DexCacheData* data) {
4325   return data != nullptr
4326       ? ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data->weak_root))
4327       : nullptr;
4328 }
4329 
IsSameClassLoader(ObjPtr<mirror::DexCache> dex_cache,const DexCacheData * data,ObjPtr<mirror::ClassLoader> class_loader)4330 bool ClassLinker::IsSameClassLoader(
4331     ObjPtr<mirror::DexCache> dex_cache,
4332     const DexCacheData* data,
4333     ObjPtr<mirror::ClassLoader> class_loader) {
4334   CHECK(data != nullptr);
4335   DCHECK_EQ(FindDexCacheDataLocked(*dex_cache->GetDexFile()), data);
4336   return data->class_table == ClassTableForClassLoader(class_loader);
4337 }
4338 
// Registers a pre-existing `dex_cache` (one not allocated here, e.g. loaded
// from an image) with `class_loader`. Debug builds verify the dex file has not
// already been registered.
void ClassLinker::RegisterExistingDexCache(ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader) {
  SCOPED_TRACE << __FUNCTION__ << " " << dex_cache->GetDexFile()->GetLocation();
  Thread* self = Thread::Current();
  StackHandleScope<2> hs(self);
  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  const DexFile* dex_file = dex_cache->GetDexFile();
  DCHECK(dex_file != nullptr) << "Attempt to register uninitialized dex_cache object!";
  if (kIsDebugBuild) {
    // Sanity check: no earlier registration may exist for this dex file.
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    const DexCacheData* old_data = FindDexCacheDataLocked(*dex_file);
    ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCacheLocked(self, old_data);
    DCHECK(old_dex_cache.IsNull()) << "Attempt to manually register a dex cache thats already "
                                   << "been registered on dex file " << dex_file->GetLocation();
  }
  ClassTable* table;
  {
    // Create (or fetch) the loader's class table before taking the dex lock.
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    table = InsertClassTableForClassLoader(h_class_loader.Get());
  }
  // Avoid a deadlock between a garbage collecting thread running a checkpoint,
  // a thread holding the dex lock and blocking on a condition variable regarding
  // weak references access, and a thread blocking on the dex lock.
  gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
  WriterMutexLock mu(self, *Locks::dex_lock_);
  RegisterDexFileLocked(*dex_file, h_dex_cache.Get(), h_class_loader.Get());
  table->InsertStrongRoot(h_dex_cache.Get());
  if (h_class_loader.Get() != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
  }
}
4373 
ThrowDexFileAlreadyRegisteredError(Thread * self,const DexFile & dex_file)4374 static void ThrowDexFileAlreadyRegisteredError(Thread* self, const DexFile& dex_file)
4375     REQUIRES_SHARED(Locks::mutator_lock_) {
4376   self->ThrowNewExceptionF("Ljava/lang/InternalError;",
4377                            "Attempt to register dex file %s with multiple class loaders",
4378                            dex_file.GetLocation().c_str());
4379 }
4380 
WriteBarrierOnClassLoaderLocked(ObjPtr<mirror::ClassLoader> class_loader,ObjPtr<mirror::Object> root)4381 void ClassLinker::WriteBarrierOnClassLoaderLocked(ObjPtr<mirror::ClassLoader> class_loader,
4382                                                   ObjPtr<mirror::Object> root) {
4383   if (class_loader != nullptr) {
4384     // Since we added a strong root to the class table, do the write barrier as required for
4385     // remembered sets and generational GCs.
4386     WriteBarrier::ForEveryFieldWrite(class_loader);
4387   } else if (log_new_roots_) {
4388     new_roots_.push_back(GcRoot<mirror::Object>(root));
4389   }
4390 }
4391 
WriteBarrierOnClassLoader(Thread * self,ObjPtr<mirror::ClassLoader> class_loader,ObjPtr<mirror::Object> root)4392 void ClassLinker::WriteBarrierOnClassLoader(Thread* self,
4393                                             ObjPtr<mirror::ClassLoader> class_loader,
4394                                             ObjPtr<mirror::Object> root) {
4395   if (class_loader != nullptr) {
4396     // Since we added a strong root to the class table, do the write barrier as required for
4397     // remembered sets and generational GCs.
4398     WriteBarrier::ForEveryFieldWrite(class_loader);
4399   } else {
4400     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4401     if (log_new_roots_) {
4402       new_roots_.push_back(GcRoot<mirror::Object>(root));
4403     }
4404   }
4405 }
4406 
// Returns the DexCache registered for `dex_file` under `class_loader`,
// allocating and registering a new one if none exists yet. Returns null with a
// pending exception if the dex file is already registered with a different
// class loader (InternalError) or if allocation fails (OOME).
ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
                                                      ObjPtr<mirror::ClassLoader> class_loader) {
  Thread* self = Thread::Current();
  ObjPtr<mirror::DexCache> old_dex_cache;
  bool registered_with_another_class_loader = false;
  {
    // Fast path: look up an existing registration under the reader lock.
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
    old_dex_cache = DecodeDexCacheLocked(self, old_data);
    if (old_dex_cache != nullptr) {
      if (IsSameClassLoader(old_dex_cache, old_data, class_loader)) {
        return old_dex_cache;
      } else {
        // TODO This is not very clean looking. Should maybe try to make a way to request exceptions
        // be thrown when it's safe to do so to simplify this.
        registered_with_another_class_loader = true;
      }
    }
  }
  // We need to have released the dex_lock_ to allocate safely.
  if (registered_with_another_class_loader) {
    ThrowDexFileAlreadyRegisteredError(self, dex_file);
    return nullptr;
  }
  SCOPED_TRACE << __FUNCTION__ << " " << dex_file.GetLocation();
  LinearAlloc* const linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader);
  DCHECK(linear_alloc != nullptr);
  ClassTable* table;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    table = InsertClassTableForClassLoader(class_loader);
  }
  // Don't alloc while holding the lock, since allocation may need to
  // suspend all threads and another thread may need the dex_lock_ to
  // get to a suspend point.
  StackHandleScope<3> hs(self);
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(self, dex_file)));
  {
    // Avoid a deadlock between a garbage collecting thread running a checkpoint,
    // a thread holding the dex lock and blocking on a condition variable regarding
    // weak references access, and a thread blocking on the dex lock.
    gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
    WriterMutexLock mu(self, *Locks::dex_lock_);
    // Re-check under the writer lock: another thread may have registered the
    // dex file while we were allocating above.
    const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
    old_dex_cache = DecodeDexCacheLocked(self, old_data);
    if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
      // Do Initialize while holding dex lock to make sure two threads don't call it
      // at the same time with the same dex cache. Since the .bss is shared this can cause failing
      // DCHECK that the arrays are null.
      h_dex_cache->Initialize(&dex_file, h_class_loader.Get());
      RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
    }
    if (old_dex_cache != nullptr) {
      // Another thread managed to initialize the dex cache faster, so use that DexCache.
      // If this thread encountered OOME, ignore it.
      DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
      self->ClearException();
      // We cannot call EnsureSameClassLoader() or allocate an exception while holding the
      // dex_lock_.
      if (IsSameClassLoader(old_dex_cache, old_data, h_class_loader.Get())) {
        return old_dex_cache;
      } else {
        registered_with_another_class_loader = true;
      }
    }
  }
  if (registered_with_another_class_loader) {
    ThrowDexFileAlreadyRegisteredError(self, dex_file);
    return nullptr;
  }
  if (h_dex_cache == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  if (table->InsertStrongRoot(h_dex_cache.Get())) {
    WriteBarrierOnClassLoader(self, h_class_loader.Get(), h_dex_cache.Get());
  } else {
    // Write-barrier not required if strong-root isn't inserted.
  }
  VLOG(class_linker) << "Registered dex file " << dex_file.GetLocation();
  PaletteNotifyDexFileLoaded(dex_file.GetLocation().c_str());
  return h_dex_cache.Get();
}
4491 
IsDexFileRegistered(Thread * self,const DexFile & dex_file)4492 bool ClassLinker::IsDexFileRegistered(Thread* self, const DexFile& dex_file) {
4493   ReaderMutexLock mu(self, *Locks::dex_lock_);
4494   return DecodeDexCacheLocked(self, FindDexCacheDataLocked(dex_file)) != nullptr;
4495 }
4496 
FindDexCache(Thread * self,const DexFile & dex_file)4497 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const DexFile& dex_file) {
4498   ReaderMutexLock mu(self, *Locks::dex_lock_);
4499   const DexCacheData* dex_cache_data = FindDexCacheDataLocked(dex_file);
4500   ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4501   if (dex_cache != nullptr) {
4502     return dex_cache;
4503   }
4504   // Failure, dump diagnostic and abort.
4505   for (const auto& entry : dex_caches_) {
4506     const DexCacheData& data = entry.second;
4507     if (DecodeDexCacheLocked(self, &data) != nullptr) {
4508       LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << entry.first->GetLocation();
4509     }
4510   }
4511   LOG(FATAL) << "Failed to find DexCache for DexFile " << dex_file.GetLocation()
4512              << " " << &dex_file;
4513   UNREACHABLE();
4514 }
4515 
FindDexCache(Thread * self,const OatDexFile & oat_dex_file)4516 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const OatDexFile& oat_dex_file) {
4517   ReaderMutexLock mu(self, *Locks::dex_lock_);
4518   const DexCacheData* dex_cache_data = FindDexCacheDataLocked(oat_dex_file);
4519   ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4520   if (dex_cache != nullptr) {
4521     return dex_cache;
4522   }
4523   // Failure, dump diagnostic and abort.
4524   if (dex_cache_data == nullptr) {
4525     LOG(FATAL_WITHOUT_ABORT) << "NULL dex_cache_data";
4526   } else {
4527     LOG(FATAL_WITHOUT_ABORT)
4528         << "dex_cache_data=" << dex_cache_data
4529         << " weak_root=" << dex_cache_data->weak_root
4530         << " decoded_weak_root=" << self->DecodeJObject(dex_cache_data->weak_root);
4531   }
4532   for (const auto& entry : dex_caches_) {
4533     const DexCacheData& data = entry.second;
4534     if (DecodeDexCacheLocked(self, &data) != nullptr) {
4535       const OatDexFile* other_oat_dex_file = entry.first->GetOatDexFile();
4536       const OatFile* oat_file =
4537           (other_oat_dex_file == nullptr) ? nullptr : other_oat_dex_file->GetOatFile();
4538       LOG(FATAL_WITHOUT_ABORT)
4539           << "Registered dex file " << entry.first->GetLocation()
4540           << " oat_dex_file=" << other_oat_dex_file
4541           << " oat_file=" << oat_file
4542           << " oat_location=" << (oat_file == nullptr ? "null" : oat_file->GetLocation())
4543           << " dex_file=" << &entry.first
4544           << " weak_root=" << data.weak_root
4545           << " decoded_weak_root=" << self->DecodeJObject(data.weak_root)
4546           << " dex_cache_data=" << &data;
4547     }
4548   }
4549   LOG(FATAL) << "Failed to find DexCache for OatDexFile "
4550              << oat_dex_file.GetDexFileLocation()
4551              << " oat_dex_file=" << &oat_dex_file
4552              << " oat_file=" << oat_dex_file.GetOatFile()
4553              << " oat_location=" << oat_dex_file.GetOatFile()->GetLocation();
4554   UNREACHABLE();
4555 }
4556 
FindClassTable(Thread * self,ObjPtr<mirror::DexCache> dex_cache)4557 ClassTable* ClassLinker::FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache) {
4558   const DexFile* dex_file = dex_cache->GetDexFile();
4559   DCHECK(dex_file != nullptr);
4560   ReaderMutexLock mu(self, *Locks::dex_lock_);
4561   auto it = dex_caches_.find(dex_file);
4562   if (it != dex_caches_.end()) {
4563     const DexCacheData& data = it->second;
4564     ObjPtr<mirror::DexCache> registered_dex_cache = DecodeDexCacheLocked(self, &data);
4565     if (registered_dex_cache != nullptr) {
4566       CHECK_EQ(registered_dex_cache, dex_cache) << dex_file->GetLocation();
4567       return data.class_table;
4568     }
4569   }
4570   return nullptr;
4571 }
4572 
FindDexCacheDataLocked(const OatDexFile & oat_dex_file)4573 const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(
4574     const OatDexFile& oat_dex_file) {
4575   auto it = std::find_if(dex_caches_.begin(), dex_caches_.end(), [&](const auto& entry) {
4576     return entry.first->GetOatDexFile() == &oat_dex_file;
4577   });
4578   return it != dex_caches_.end() ? &it->second : nullptr;
4579 }
4580 
FindDexCacheDataLocked(const DexFile & dex_file)4581 const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(const DexFile& dex_file) {
4582   auto it = dex_caches_.find(&dex_file);
4583   return it != dex_caches_.end() ? &it->second : nullptr;
4584 }
4585 
// Creates the mirror::Class for primitive `type`, inserts it into the boot
// class table, and records it as class root `primitive_root`. Runs during
// single-threaded startup, so no per-class locking is needed.
void ClassLinker::CreatePrimitiveClass(Thread* self,
                                       Primitive::Type type,
                                       ClassRoot primitive_root) {
  ObjPtr<mirror::Class> primitive_class =
      AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
  CHECK(primitive_class != nullptr) << "OOM for primitive class " << type;
  // Do not hold lock on the primitive class object, the initialization of
  // primitive classes is done while the process is still single threaded.
  primitive_class->SetAccessFlagsDuringLinking(kAccPublic | kAccFinal | kAccAbstract);
  primitive_class->SetPrimitiveType(type);
  primitive_class->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
  DCHECK_EQ(primitive_class->NumMethods(), 0u);
  // Primitive classes are initialized during single threaded startup, so visibly initialized.
  primitive_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
  std::string_view descriptor(Primitive::Descriptor(type));
  ObjPtr<mirror::Class> existing = InsertClass(descriptor,
                                               primitive_class,
                                               ComputeModifiedUtf8Hash(descriptor));
  // Startup is single-threaded, so no other thread can have raced us here.
  CHECK(existing == nullptr) << "InitPrimitiveClass(" << type << ") failed";
  SetClassRoot(primitive_root, primitive_class);
}
4607 
// Returns the interface table shared by all array classes, taken from the
// Object[] class root.
inline ObjPtr<mirror::IfTable> ClassLinker::GetArrayIfTable() {
  return GetClassRoot<mirror::ObjectArray<mirror::Object>>(this)->GetIfTable();
}
4611 
4612 // Create an array class (i.e. the class object for the array, not the
4613 // array itself).  "descriptor" looks like "[C" or "[[[[B" or
4614 // "[Ljava/lang/String;".
4615 //
4616 // If "descriptor" refers to an array of primitives, look up the
4617 // primitive type's internally-generated class object.
4618 //
4619 // "class_loader" is the class loader of the class that's referring to
4620 // us.  It's used to ensure that we're looking for the element type in
4621 // the right context.  It does NOT become the class loader for the
4622 // array class; that always comes from the base element class.
4623 //
4624 // Returns null with an exception raised on failure.
ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
                                                    const char* descriptor,
                                                    size_t descriptor_length,
                                                    size_t hash,
                                                    Handle<mirror::ClassLoader> class_loader) {
  // Identify the underlying component type
  CHECK_EQ('[', descriptor[0]);
  std::string_view sv_descriptor(descriptor, descriptor_length);
  StackHandleScope<2> hs(self);

  // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
  // code to be executed. We put it up here so we can avoid all the allocations associated with
  // creating the class. This can happen with (eg) jit threads.
  if (!self->CanLoadClasses()) {
    // Make sure we don't try to load anything, potentially causing an infinite loop.
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return nullptr;
  }

  // Resolve the component type by stripping the leading '[' from the descriptor.
  MutableHandle<mirror::Class> component_type =
      hs.NewHandle(FindClass(self, descriptor + 1, descriptor_length - 1, class_loader));
  if (component_type == nullptr) {
    DCHECK(self->IsExceptionPending());
    // We need to accept erroneous classes as component types. Under AOT, we
    // don't accept them as we cannot encode the erroneous class in an image.
    std::string_view component_descriptor = sv_descriptor.substr(1u);
    const size_t component_hash = ComputeModifiedUtf8Hash(component_descriptor);
    component_type.Assign(
        LookupClass(self, component_descriptor, component_hash, class_loader.Get()));
    if (component_type == nullptr || Runtime::Current()->IsAotCompiler()) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    } else {
      // Found an (erroneous) component class; proceed with the array anyway.
      self->ClearException();
    }
  }
  if (UNLIKELY(component_type->IsPrimitiveVoid())) {
    // void[] is not a valid type.
    ThrowNoClassDefFoundError("Attempt to create array of void primitive type");
    return nullptr;
  }
  // See if the component type is already loaded.  Array classes are
  // always associated with the class loader of their underlying
  // element type -- an array of Strings goes with the loader for
  // java/lang/String -- so we need to look for it there.  (The
  // caller should have checked for the existence of the class
  // before calling here, but they did so with *their* class loader,
  // not the component type's loader.)
  //
  // If we find it, the caller adds "loader" to the class' initiating
  // loader list, which should prevent us from going through this again.
  //
  // This call is unnecessary if "loader" and "component_type->GetClassLoader()"
  // are the same, because our caller (FindClass) just did the
  // lookup.  (Even if we get this wrong we still have correct behavior,
  // because we effectively do this lookup again when we add the new
  // class to the hash table --- necessary because of possible races with
  // other threads.)
  if (class_loader.Get() != component_type->GetClassLoader()) {
    ObjPtr<mirror::Class> new_class =
        LookupClass(self, sv_descriptor, hash, component_type->GetClassLoader());
    if (new_class != nullptr) {
      return new_class;
    }
  }
  // Core array classes, i.e. Object[], Class[], String[] and primitive
  // arrays, have special initialization and they should be found above.
  DCHECK_IMPLIES(component_type->IsObjectClass(),
                 // Guard from false positives for errors before setting superclass.
                 component_type->IsErroneousUnresolved());
  DCHECK(!component_type->IsStringClass());
  DCHECK(!component_type->IsClassClass());
  DCHECK(!component_type->IsPrimitive());

  // Fill out the fields in the Class.
  //
  // It is possible to execute some methods against arrays, because
  // all arrays are subclasses of java_lang_Object_, so we need to set
  // up a vtable.  We can just point at the one in java_lang_Object_.
  //
  // Array classes are simple enough that we don't need to do a full
  // link step.
  size_t array_class_size = mirror::Array::ClassSize(image_pointer_size_);
  auto visitor = [this, array_class_size, component_type](ObjPtr<mirror::Object> obj,
                                                          size_t usable_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ScopedAssertNoTransactionChecks santc("CreateArrayClass");
    mirror::Class::InitializeClassVisitor init_class(array_class_size);
    init_class(obj, usable_size);
    ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
    klass->SetComponentType(component_type.Get());
    // Do not hold lock for initialization, the fence issued after the visitor
    // returns ensures memory visibility together with the implicit consume
    // semantics (for all supported architectures) for any thread that loads
    // the array class reference from any memory locations afterwards.
    FinishArrayClassSetup(klass);
  };
  auto new_class = hs.NewHandle<mirror::Class>(
      AllocClass(self, GetClassRoot<mirror::Class>(this), array_class_size, visitor));
  if (new_class == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }

  ObjPtr<mirror::Class> existing = InsertClass(sv_descriptor, new_class.Get(), hash);
  if (existing == nullptr) {
    // We postpone ClassLoad and ClassPrepare events to this point in time to avoid
    // duplicate events in case of races. Array classes don't really follow dedicated
    // load and prepare, anyways.
    Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(new_class);
    Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(new_class, new_class);

    jit::Jit::NewTypeLoadedIfUsingJit(new_class.Get());
    return new_class.Get();
  }
  // Another thread must have loaded the class after we
  // started but before we finished.  Abandon what we've
  // done.
  //
  // (Yes, this happens.)

  return existing;
}
4749 
LookupPrimitiveClass(char type)4750 ObjPtr<mirror::Class> ClassLinker::LookupPrimitiveClass(char type) {
4751   ClassRoot class_root;
4752   switch (type) {
4753     case 'B': class_root = ClassRoot::kPrimitiveByte; break;
4754     case 'C': class_root = ClassRoot::kPrimitiveChar; break;
4755     case 'D': class_root = ClassRoot::kPrimitiveDouble; break;
4756     case 'F': class_root = ClassRoot::kPrimitiveFloat; break;
4757     case 'I': class_root = ClassRoot::kPrimitiveInt; break;
4758     case 'J': class_root = ClassRoot::kPrimitiveLong; break;
4759     case 'S': class_root = ClassRoot::kPrimitiveShort; break;
4760     case 'Z': class_root = ClassRoot::kPrimitiveBoolean; break;
4761     case 'V': class_root = ClassRoot::kPrimitiveVoid; break;
4762     default:
4763       return nullptr;
4764   }
4765   return GetClassRoot(class_root, this);
4766 }
4767 
FindPrimitiveClass(char type)4768 ObjPtr<mirror::Class> ClassLinker::FindPrimitiveClass(char type) {
4769   ObjPtr<mirror::Class> result = LookupPrimitiveClass(type);
4770   if (UNLIKELY(result == nullptr)) {
4771     std::string printable_type(PrintableChar(type));
4772     ThrowNoClassDefFoundError("Not a primitive type: %s", printable_type.c_str());
4773   }
4774   return result;
4775 }
4776 
// Inserts `klass` (with precomputed `hash` for `descriptor`) into the class
// table of its defining class loader. Returns null on success, or the
// already-present class if another thread inserted the same descriptor first.
ObjPtr<mirror::Class> ClassLinker::InsertClass(std::string_view descriptor,
                                               ObjPtr<mirror::Class> klass,
                                               size_t hash) {
  DCHECK(Thread::Current()->CanLoadClasses());
  if (VLOG_IS_ON(class_linker)) {
    ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
    std::string source;
    if (dex_cache != nullptr) {
      source += " from ";
      source += dex_cache->GetLocation()->ToModifiedUtf8();
    }
    LOG(INFO) << "Loaded class " << descriptor << source;
  }
  {
    WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
    const ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
    ClassTable* const class_table = InsertClassTableForClassLoader(class_loader);
    // Lookup-then-insert under the writer lock makes the race with other
    // inserters of the same descriptor well-defined: first one wins.
    ObjPtr<mirror::Class> existing = class_table->Lookup(descriptor, hash);
    if (existing != nullptr) {
      return existing;
    }
    VerifyObject(klass);
    class_table->InsertWithHash(klass, hash);
    WriteBarrierOnClassLoaderLocked(class_loader, klass);
  }
  if (kIsDebugBuild) {
    // Test that copied methods correctly can find their holder.
    for (ArtMethod& method : klass->GetCopiedMethods(image_pointer_size_)) {
      CHECK_EQ(GetHoldingClassOfCopiedMethod(&method), klass);
    }
  }
  return nullptr;
}
4810 
WriteBarrierForBootOatFileBssRoots(const OatFile * oat_file)4811 void ClassLinker::WriteBarrierForBootOatFileBssRoots(const OatFile* oat_file) {
4812   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4813   DCHECK(!oat_file->GetBssGcRoots().empty()) << oat_file->GetLocation();
4814   if (log_new_roots_ && !ContainsElement(new_bss_roots_boot_oat_files_, oat_file)) {
4815     new_bss_roots_boot_oat_files_.push_back(oat_file);
4816   }
4817 }
4818 
4819 // TODO This should really be in mirror::Class.
UpdateClassMethods(ObjPtr<mirror::Class> klass,LengthPrefixedArray<ArtMethod> * new_methods)4820 void ClassLinker::UpdateClassMethods(ObjPtr<mirror::Class> klass,
4821                                      LengthPrefixedArray<ArtMethod>* new_methods) {
4822   klass->SetMethodsPtrUnchecked(new_methods,
4823                                 klass->NumDirectMethods(),
4824                                 klass->NumDeclaredVirtualMethods());
4825   // Need to mark the card so that the remembered sets and mod union tables get updated.
4826   WriteBarrier::ForEveryFieldWrite(klass);
4827 }
4828 
LookupClass(Thread * self,std::string_view descriptor,ObjPtr<mirror::ClassLoader> class_loader)4829 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4830                                                std::string_view descriptor,
4831                                                ObjPtr<mirror::ClassLoader> class_loader) {
4832   return LookupClass(self, descriptor, ComputeModifiedUtf8Hash(descriptor), class_loader);
4833 }
4834 
LookupClass(Thread * self,std::string_view descriptor,size_t hash,ObjPtr<mirror::ClassLoader> class_loader)4835 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4836                                                std::string_view descriptor,
4837                                                size_t hash,
4838                                                ObjPtr<mirror::ClassLoader> class_loader) {
4839   ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4840   ClassTable* const class_table = ClassTableForClassLoader(class_loader);
4841   if (class_table != nullptr) {
4842     ObjPtr<mirror::Class> result = class_table->Lookup(descriptor, hash);
4843     if (result != nullptr) {
4844       return result;
4845     }
4846   }
4847   return nullptr;
4848 }
4849 
4850 class MoveClassTableToPreZygoteVisitor : public ClassLoaderVisitor {
4851  public:
MoveClassTableToPreZygoteVisitor()4852   MoveClassTableToPreZygoteVisitor() {}
4853 
Visit(ObjPtr<mirror::ClassLoader> class_loader)4854   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
4855       REQUIRES(Locks::classlinker_classes_lock_)
4856       REQUIRES_SHARED(Locks::mutator_lock_) override {
4857     ClassTable* const class_table = class_loader->GetClassTable();
4858     if (class_table != nullptr) {
4859       class_table->FreezeSnapshot();
4860     }
4861   }
4862 };
4863 
MoveClassTableToPreZygote()4864 void ClassLinker::MoveClassTableToPreZygote() {
4865   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4866   boot_class_table_->FreezeSnapshot();
4867   MoveClassTableToPreZygoteVisitor visitor;
4868   VisitClassLoaders(&visitor);
4869 }
4870 
// Verifies `supertype` (a superclass or implemented interface) on behalf of
// `klass`. Returns true if the supertype is usable (verified, or a soft AOT
// failure). On hard failure, marks `klass` as erroneous, throws VerifyError
// (chaining any exception from the supertype's verification as its cause),
// and returns false.
bool ClassLinker::AttemptSupertypeVerification(Thread* self,
                                               verifier::VerifierDeps* verifier_deps,
                                               Handle<mirror::Class> klass,
                                               Handle<mirror::Class> supertype) {
  DCHECK(self != nullptr);
  DCHECK(klass != nullptr);
  DCHECK(supertype != nullptr);

  // Only verify if not already done and not already failed.
  if (!supertype->IsVerified() && !supertype->IsErroneous()) {
    VerifyClass(self, verifier_deps, supertype);
  }

  if (supertype->IsVerified()
      || supertype->ShouldVerifyAtRuntime()
      || supertype->IsVerifiedNeedsAccessChecks()) {
    // The supertype is either verified, or we soft failed at AOT time.
    DCHECK(supertype->IsVerified() || Runtime::Current()->IsAotCompiler());
    return true;
  }
  // If we got this far then we have a hard failure.
  std::string error_msg =
      StringPrintf("Rejecting class %s that attempts to sub-type erroneous class %s",
                   klass->PrettyDescriptor().c_str(),
                   supertype->PrettyDescriptor().c_str());
  LOG(WARNING) << error_msg  << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
  StackHandleScope<1> hs(self);
  // Preserve the exception raised by the supertype's verification (if any)
  // so it can become the cause of the VerifyError we throw for `klass`.
  Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
  if (cause != nullptr) {
    // Set during VerifyClass call (if at all).
    self->ClearException();
  }
  // Change into a verify error.
  ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
  if (cause != nullptr) {
    self->GetException()->SetCause(cause.Get());
  }
  ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
  if (Runtime::Current()->IsAotCompiler()) {
    // Let the AOT compiler record that this class definition was rejected.
    Runtime::Current()->GetCompilerCallbacks()->ClassRejected(ref);
  }
  // Need to grab the lock to change status.
  ObjectLock<mirror::Class> super_lock(self, klass);
  mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
  return false;
}
4916 
// Verifies `klass`. First waits for / short-circuits on any verification
// already done or in progress, then verifies the superclass and any
// superinterfaces with default methods, then either reuses the verification
// status precomputed in the oat/vdex file or runs the method verifier.
// Updates the class status (kVerified, kRetryVerificationAtRuntime,
// kVerifiedNeedsAccessChecks, or an error status) and returns the failure
// kind (kNoFailure on success). `verifier_deps` may be null; when non-null it
// records verification dependencies for the dex files it covers.
verifier::FailureKind ClassLinker::VerifyClass(Thread* self,
                                               verifier::VerifierDeps* verifier_deps,
                                               Handle<mirror::Class> klass,
                                               verifier::HardFailLogMode log_level) {
  {
    // TODO: assert that the monitor on the Class is held
    ObjectLock<mirror::Class> lock(self, klass);

    // Is somebody verifying this now?
    ClassStatus old_status = klass->GetStatus();
    while (old_status == ClassStatus::kVerifying) {
      lock.WaitIgnoringInterrupts();
      // WaitIgnoringInterrupts can still receive an interrupt and return early, in this
      // case we may see the same status again. b/62912904. This is why the check is
      // greater or equal.
      CHECK(klass->IsErroneous() || (klass->GetStatus() >= old_status))
          << "Class '" << klass->PrettyClass()
          << "' performed an illegal verification state transition from " << old_status
          << " to " << klass->GetStatus();
      old_status = klass->GetStatus();
    }

    // The class might already be erroneous, for example at compile time if we attempted to verify
    // this class as a parent to another.
    if (klass->IsErroneous()) {
      ThrowEarlierClassFailure(klass.Get());
      return verifier::FailureKind::kHardFailure;
    }

    // Don't attempt to re-verify if already verified.
    if (klass->IsVerified()) {
      if (verifier_deps != nullptr &&
          verifier_deps->ContainsDexFile(klass->GetDexFile()) &&
          !verifier_deps->HasRecordedVerifiedStatus(klass->GetDexFile(), *klass->GetClassDef()) &&
          !Runtime::Current()->IsAotCompiler()) {
        // If the klass is verified, but `verifier_deps` did not record it, this
        // means we are running background verification of a secondary dex file.
        // Re-run the verifier to populate `verifier_deps`.
        // No need to run the verification when running on the AOT Compiler, as
        // the driver handles those multithreaded cases already.
        std::string error_msg;
        verifier::FailureKind failure =
            PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
        // We could have soft failures, so just check that we don't have a hard
        // failure.
        DCHECK_NE(failure, verifier::FailureKind::kHardFailure) << error_msg;
      }
      return verifier::FailureKind::kNoFailure;
    }

    if (klass->IsVerifiedNeedsAccessChecks()) {
      if (!Runtime::Current()->IsAotCompiler()) {
        // Mark the class as having a verification attempt to avoid re-running
        // the verifier.
        mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
      }
      return verifier::FailureKind::kAccessChecksFailure;
    }

    // For AOT, don't attempt to re-verify if we have already found we should
    // verify at runtime.
    if (klass->ShouldVerifyAtRuntime()) {
      CHECK(Runtime::Current()->IsAotCompiler());
      return verifier::FailureKind::kSoftFailure;
    }

    // Mark the class as being verified while we still hold the lock, so that
    // concurrent verifiers wait in the loop above instead of racing with us.
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kResolved);
    mirror::Class::SetStatus(klass, ClassStatus::kVerifying, self);

    // Skip verification if disabled.
    if (!Runtime::Current()->IsVerificationEnabled()) {
      mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
      UpdateClassAfterVerification(klass, image_pointer_size_, verifier::FailureKind::kNoFailure);
      return verifier::FailureKind::kNoFailure;
    }
  }

  VLOG(class_linker) << "Beginning verification for class: "
                     << klass->PrettyDescriptor()
                     << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();

  // Verify super class.
  StackHandleScope<2> hs(self);
  MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
  // If we have a superclass and we get a hard verification failure we can return immediately.
  if (supertype != nullptr &&
      !AttemptSupertypeVerification(self, verifier_deps, klass, supertype)) {
    CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
    return verifier::FailureKind::kHardFailure;
  }

  // Verify all default super-interfaces.
  //
  // (1) Don't bother if the superclass has already had a soft verification failure.
  //
  // (2) Interfaces shouldn't bother to do this recursive verification because they cannot cause
  //     recursive initialization by themselves. This is because when an interface is initialized
  //     directly it must not initialize its superinterfaces. We are allowed to verify regardless
  //     but choose not to for an optimization. If the interfaces is being verified due to a class
  //     initialization (which would need all the default interfaces to be verified) the class code
  //     will trigger the recursive verification anyway.
  if ((supertype == nullptr || supertype->IsVerified())  // See (1)
      && !klass->IsInterface()) {                              // See (2)
    int32_t iftable_count = klass->GetIfTableCount();
    MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
    // Loop through all interfaces this class has defined. It doesn't matter the order.
    for (int32_t i = 0; i < iftable_count; i++) {
      iface.Assign(klass->GetIfTable()->GetInterface(i));
      DCHECK(iface != nullptr);
      // We only care if we have default interfaces and can skip if we are already verified...
      if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
        continue;
      } else if (UNLIKELY(!AttemptSupertypeVerification(self, verifier_deps, klass, iface))) {
        // We had a hard failure while verifying this interface. Just return immediately.
        CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
        return verifier::FailureKind::kHardFailure;
      } else if (UNLIKELY(!iface->IsVerified())) {
        // We softly failed to verify the iface. Stop checking and clean up.
        // Put the iface into the supertype handle so we know what caused us to fail.
        supertype.Assign(iface.Get());
        break;
      }
    }
  }

  // At this point if verification failed, then supertype is the "first" supertype that failed
  // verification (without a specific order). If verification succeeded, then supertype is either
  // null or the original superclass of klass and is verified.
  DCHECK(supertype == nullptr ||
         supertype.Get() == klass->GetSuperClass() ||
         !supertype->IsVerified());

  // Try to use verification information from the oat file, otherwise do runtime verification.
  const DexFile& dex_file = *klass->GetDexCache()->GetDexFile();
  ClassStatus oat_file_class_status(ClassStatus::kNotReady);
  bool preverified = VerifyClassUsingOatFile(self, dex_file, klass, oat_file_class_status);

  VLOG(class_linker) << "Class preverified status for class "
                     << klass->PrettyDescriptor()
                     << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
                     << ": "
                     << preverified
                     << " (" << oat_file_class_status << ")";

  // If the oat file says the class had an error, re-run the verifier. That way we will either:
  // 1) Be successful at runtime, or
  // 2) Get a precise error message.
  DCHECK_IMPLIES(mirror::Class::IsErroneous(oat_file_class_status), !preverified);

  std::string error_msg;
  verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
  if (!preverified) {
    verifier_failure = PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
  } else if (oat_file_class_status == ClassStatus::kVerifiedNeedsAccessChecks) {
    verifier_failure = verifier::FailureKind::kAccessChecksFailure;
  }

  // Verification is done, grab the lock again.
  ObjectLock<mirror::Class> lock(self, klass);
  self->AssertNoPendingException();

  if (verifier_failure == verifier::FailureKind::kHardFailure) {
    VLOG(verifier) << "Verification failed on class " << klass->PrettyDescriptor()
                  << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
                  << " because: " << error_msg;
    ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
    mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
    return verifier_failure;
  }

  // Make sure all classes referenced by catch blocks are resolved.
  ResolveClassExceptionHandlerTypes(klass);

  if (Runtime::Current()->IsAotCompiler()) {
    if (supertype != nullptr && supertype->ShouldVerifyAtRuntime()) {
      // Regardless of our own verification result, we need to verify the class
      // at runtime if the super class is not verified. This is required in case
      // we generate an app/boot image.
      mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
    } else if (verifier_failure == verifier::FailureKind::kNoFailure) {
      mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
    } else if (verifier_failure == verifier::FailureKind::kSoftFailure ||
               verifier_failure == verifier::FailureKind::kTypeChecksFailure) {
      mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
    } else {
      mirror::Class::SetStatus(klass, ClassStatus::kVerifiedNeedsAccessChecks, self);
    }
    // Notify the compiler about the verification status, in case the class
    // was verified implicitly (eg super class of a compiled class). When the
    // compiler unloads dex file after compilation, we still want to keep
    // verification states.
    Runtime::Current()->GetCompilerCallbacks()->UpdateClassState(
        ClassReference(&klass->GetDexFile(), klass->GetDexClassDefIndex()), klass->GetStatus());
  } else {
    mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
  }

  UpdateClassAfterVerification(klass, image_pointer_size_, verifier_failure);
  return verifier_failure;
}
5117 
PerformClassVerification(Thread * self,verifier::VerifierDeps * verifier_deps,Handle<mirror::Class> klass,verifier::HardFailLogMode log_level,std::string * error_msg)5118 verifier::FailureKind ClassLinker::PerformClassVerification(Thread* self,
5119                                                             verifier::VerifierDeps* verifier_deps,
5120                                                             Handle<mirror::Class> klass,
5121                                                             verifier::HardFailLogMode log_level,
5122                                                             std::string* error_msg) {
5123   Runtime* const runtime = Runtime::Current();
5124   StackHandleScope<2> hs(self);
5125   Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
5126   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
5127   return verifier::ClassVerifier::VerifyClass(self,
5128                                               verifier_deps,
5129                                               dex_cache->GetDexFile(),
5130                                               klass,
5131                                               dex_cache,
5132                                               class_loader,
5133                                               *klass->GetClassDef(),
5134                                               runtime->GetCompilerCallbacks(),
5135                                               log_level,
5136                                               Runtime::Current()->GetTargetSdkVersion(),
5137                                               error_msg);
5138 }
5139 
// Attempts to establish the verification status of `klass` from its oat/vdex
// file instead of re-running the verifier. On success returns true and stores
// the precomputed status into `oat_file_class_status`; returns false when
// runtime verification is still required (no oat file, compile-time failure,
// or an unusable recorded status).
bool ClassLinker::VerifyClassUsingOatFile(Thread* self,
                                          const DexFile& dex_file,
                                          Handle<mirror::Class> klass,
                                          ClassStatus& oat_file_class_status) {
  // If we're compiling, we can only verify the class using the oat file if
  // we are not compiling the image or if the class we're verifying is not part of
  // the compilation unit (app - dependencies). We will let the compiler callback
  // tell us about the latter.
  if (Runtime::Current()->IsAotCompiler()) {
    CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
    // We are compiling an app (not the image).
    if (!callbacks->CanUseOatStatusForVerification(klass.Get())) {
      return false;
    }
  }

  const OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
  // In case we run without an image there won't be a backing oat file.
  if (oat_dex_file == nullptr || oat_dex_file->GetOatFile() == nullptr) {
    return false;
  }

  uint16_t class_def_index = klass->GetDexClassDefIndex();
  oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
  // Fully verified at compile time; nothing more to do.
  if (oat_file_class_status >= ClassStatus::kVerified) {
    return true;
  }
  if (oat_file_class_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
    // We return that the class has already been verified, and the caller should
    // check the class status to ensure we run with access checks.
    return true;
  }

  // Check the class status with the vdex file.
  const OatFile* oat_file = oat_dex_file->GetOatFile();
  if (oat_file != nullptr) {
    ClassStatus vdex_status = oat_file->GetVdexFile()->ComputeClassStatus(self, klass);
    if (vdex_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
      VLOG(verifier) << "Vdex verification success for " << klass->PrettyClass();
      oat_file_class_status = vdex_status;
      return true;
    }
  }

  // If we only verified a subset of the classes at compile time, we can end up with classes that
  // were resolved by the verifier.
  if (oat_file_class_status == ClassStatus::kResolved) {
    return false;
  }
  // We never expect a .oat file to have kRetryVerificationAtRuntime statuses.
  CHECK_NE(oat_file_class_status, ClassStatus::kRetryVerificationAtRuntime)
      << klass->PrettyClass() << " " << dex_file.GetLocation();

  if (mirror::Class::IsErroneous(oat_file_class_status)) {
    // Compile time verification failed with a hard error. We'll re-run
    // verification, which might be successful at runtime.
    return false;
  }
  if (oat_file_class_status == ClassStatus::kNotReady) {
    // Status is uninitialized if we couldn't determine the status at compile time, for example,
    // not loading the class.
    // TODO: when the verifier doesn't rely on Class-es failing to resolve/load the type hierarchy
    // isn't a problem and this case shouldn't occur
    return false;
  }
  // Any other recorded status is unexpected; abort with diagnostics.
  std::string temp;
  LOG(FATAL) << "Unexpected class status: " << oat_file_class_status
             << " " << dex_file.GetLocation() << " " << klass->PrettyClass() << " "
             << klass->GetDescriptor(&temp);
  UNREACHABLE();
}
5211 
ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass)5212 void ClassLinker::ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass) {
5213   for (ArtMethod& method : klass->GetMethods(image_pointer_size_)) {
5214     ResolveMethodExceptionHandlerTypes(&method);
5215   }
5216 }
5217 
// Eagerly resolves every exception type referenced by `method`'s catch
// handlers so no resolution is needed at exception-delivery time. Types that
// fail to resolve are tolerated: the pending exception is cleared and the
// handler type stays unresolved (such handlers are ignored during delivery).
void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
  // similar to DexVerifier::ScanTryCatchBlocks and dex2oat's ResolveExceptionsForMethod.
  CodeItemDataAccessor accessor(method->DexInstructionData());
  if (!accessor.HasCodeItem()) {
    return;  // native or abstract method
  }
  if (accessor.TriesSize() == 0) {
    return;  // nothing to process
  }
  const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
  CHECK(method->GetDexFile()->IsInDataSection(handlers_ptr))
      << method->PrettyMethod()
      << "@" << method->GetDexFile()->GetLocation()
      << "@" << reinterpret_cast<const void*>(handlers_ptr)
      << " is_compact_dex=" << method->GetDexFile()->IsCompactDexFile();

  // The handler data starts with the number of handler lists as a ULEB128.
  uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
  for (uint32_t idx = 0; idx < handlers_size; idx++) {
    CatchHandlerIterator iterator(handlers_ptr);
    for (; iterator.HasNext(); iterator.Next()) {
      // Ensure exception types are resolved so that they don't need resolution to be delivered,
      // unresolved exception types will be ignored by exception delivery
      if (iterator.GetHandlerTypeIndex().IsValid()) {
        ObjPtr<mirror::Class> exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method);
        if (exception_type == nullptr) {
          DCHECK(Thread::Current()->IsExceptionPending());
          Thread::Current()->ClearException();
        }
      }
    }
    // Advance to the next handler list (each list is variable-length).
    handlers_ptr = iterator.EndDataPointer();
  }
}
5251 
// Creates a java.lang.reflect.Proxy subclass implementing `interfaces`, named
// `name`, defined in `loader`. `methods` holds all candidate methods (only
// non-private, non-static ones are proxied) and `throws` holds, per method,
// the declared exception classes (filtered in lockstep with `methods`).
// Returns the initialized proxy class, or null with a pending exception
// (OOME, or NoClassDefFoundError when loading is forbidden on this thread).
ObjPtr<mirror::Class> ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& soa,
                                                    jstring name,
                                                    jobjectArray interfaces,
                                                    jobject loader,
                                                    jobjectArray methods,
                                                    jobjectArray throws) {
  Thread* self = soa.Self();

  // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
  // code to be executed. We put it up here so we can avoid all the allocations associated with
  // creating the class. This can happen with (eg) jit-threads.
  if (!self->CanLoadClasses()) {
    // Make sure we don't try to load anything, potentially causing an infinite loop.
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return nullptr;
  }

  StackHandleScope<12> hs(self);
  MutableHandle<mirror::Class> temp_klass(hs.NewHandle(
      AllocClass(self, GetClassRoot<mirror::Class>(this), sizeof(mirror::Class))));
  if (temp_klass == nullptr) {
    CHECK(self->IsExceptionPending());  // OOME.
    return nullptr;
  }
  DCHECK(temp_klass->GetClass() != nullptr);
  temp_klass->SetObjectSize(sizeof(mirror::Proxy));
  // Set the class access flags incl. VerificationAttempted, so we do not try to set the flag on
  // the methods.
  temp_klass->SetAccessFlagsDuringLinking(kAccClassIsProxy | kAccPublic | kAccFinal);
  temp_klass->SetClassLoader(soa.Decode<mirror::ClassLoader>(loader));
  DCHECK_EQ(temp_klass->GetPrimitiveType(), Primitive::kPrimNot);
  temp_klass->SetName(soa.Decode<mirror::String>(name));
  temp_klass->SetDexCache(GetClassRoot<mirror::Proxy>(this)->GetDexCache());
  // Object has an empty iftable, copy it for that reason.
  temp_klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
  mirror::Class::SetStatus(temp_klass, ClassStatus::kIdx, self);
  std::string descriptor;
  const char* raw_descriptor = temp_klass->GetDescriptor(&descriptor);
  DCHECK(raw_descriptor == descriptor.c_str());
  const size_t hash = ComputeModifiedUtf8Hash(descriptor);

  // Needs to be before we insert the class so that the allocator field is set.
  LinearAlloc* const allocator = GetOrCreateAllocatorForClassLoader(temp_klass->GetClassLoader());

  // Insert the class before loading the fields as the field roots
  // (ArtField::declaring_class_) are only visited from the class
  // table. There can't be any suspend points between inserting the
  // class and setting the field arrays below.
  ObjPtr<mirror::Class> existing = InsertClass(descriptor, temp_klass.Get(), hash);
  CHECK(existing == nullptr);

  // Instance fields are inherited, but we add a couple of static fields...
  const size_t num_fields = 2;
  LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
  temp_klass->SetSFieldsPtr(sfields);

  // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
  // our proxy, so Class.getInterfaces doesn't return the flattened set.
  ArtField& interfaces_sfield = sfields->At(0);
  interfaces_sfield.SetDexFieldIndex(0);
  interfaces_sfield.SetDeclaringClass(temp_klass.Get());
  interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);

  // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
  ArtField& throws_sfield = sfields->At(1);
  throws_sfield.SetDexFieldIndex(1);
  throws_sfield.SetDeclaringClass(temp_klass.Get());
  throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);

  // Proxies have 1 direct method, the constructor
  const size_t num_direct_methods = 1;

  // The array we get passed contains all methods, including private and static
  // ones that aren't proxied. We need to filter those out since only interface
  // methods (non-private & virtual) are actually proxied.
  Handle<mirror::ObjectArray<mirror::Method>> h_methods =
      hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>>(methods));
  DCHECK_EQ(h_methods->GetClass(), GetClassRoot<mirror::ObjectArray<mirror::Method>>())
      << mirror::Class::PrettyClass(h_methods->GetClass());
  // List of the actual virtual methods this class will have.
  std::vector<ArtMethod*> proxied_methods;
  std::vector<size_t> proxied_throws_idx;
  proxied_methods.reserve(h_methods->GetLength());
  proxied_throws_idx.reserve(h_methods->GetLength());
  // Filter out to only the non-private virtual methods.
  for (auto [mirror, idx] : ZipCount(h_methods.Iterate<mirror::Method>())) {
    ArtMethod* m = mirror->GetArtMethod();
    if (!m->IsPrivate() && !m->IsStatic()) {
      proxied_methods.push_back(m);
      // Remember the original index so the matching `throws` entry can be found.
      proxied_throws_idx.push_back(idx);
    }
  }
  const size_t num_virtual_methods = proxied_methods.size();
  // We also need to filter out the 'throws'. The 'throws' are a Class[][] that
  // contains an array of all the classes each function is declared to throw.
  // This is used to wrap unexpected exceptions in a
  // UndeclaredThrowableException exception. This array is in the same order as
  // the methods array and like the methods array must be filtered to remove any
  // non-proxied methods.
  const bool has_filtered_methods =
      static_cast<int32_t>(num_virtual_methods) != h_methods->GetLength();
  MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> original_proxied_throws(
      hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws)));
  MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> proxied_throws(
      hs.NewHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(
          (has_filtered_methods)
              ? mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>::Alloc(
                    self, original_proxied_throws->GetClass(), num_virtual_methods)
              : original_proxied_throws.Get()));
  if (proxied_throws.IsNull() && !original_proxied_throws.IsNull()) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  if (has_filtered_methods) {
    // Compact the throws array to match the filtered methods list.
    for (auto [orig_idx, new_idx] : ZipCount(MakeIterationRange(proxied_throws_idx))) {
      DCHECK_LE(new_idx, orig_idx);
      proxied_throws->Set(new_idx, original_proxied_throws->Get(orig_idx));
    }
  }

  // Create the methods array.
  LengthPrefixedArray<ArtMethod>* proxy_class_methods = AllocArtMethodArray(
        self, allocator, num_direct_methods + num_virtual_methods);
  // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
  // want to throw OOM in the future.
  if (UNLIKELY(proxy_class_methods == nullptr)) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  temp_klass->SetMethodsPtr(proxy_class_methods, num_direct_methods, num_virtual_methods);

  // Create the single direct method.
  CreateProxyConstructor(temp_klass, temp_klass->GetDirectMethodUnchecked(0, image_pointer_size_));

  // Create virtual method using specified prototypes.
  // TODO These should really use the iterators.
  for (size_t i = 0; i < num_virtual_methods; ++i) {
    auto* virtual_method = temp_klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
    auto* prototype = proxied_methods[i];
    CreateProxyMethod(temp_klass, prototype, virtual_method);
    DCHECK(virtual_method->GetDeclaringClass() != nullptr);
    DCHECK(prototype->GetDeclaringClass() != nullptr);
  }

  // The super class is java.lang.reflect.Proxy
  temp_klass->SetSuperClass(GetClassRoot<mirror::Proxy>(this));
  // Now effectively in the loaded state.
  mirror::Class::SetStatus(temp_klass, ClassStatus::kLoaded, self);
  self->AssertNoPendingException();

  // At this point the class is loaded. Publish a ClassLoad event.
  // Note: this may be a temporary class. It is a listener's responsibility to handle this.
  Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(temp_klass);

  MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
  {
    // Must hold lock on object when resolved.
    ObjectLock<mirror::Class> resolution_lock(self, temp_klass);
    // Link the fields and virtual methods, creating vtable and iftables.
    // The new class will replace the old one in the class table.
    Handle<mirror::ObjectArray<mirror::Class>> h_interfaces(
        hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces)));
    if (!LinkClass(self, descriptor.c_str(), temp_klass, h_interfaces, &klass)) {
      if (!temp_klass->IsErroneous()) {
        mirror::Class::SetStatus(temp_klass, ClassStatus::kErrorUnresolved, self);
      }
      return nullptr;
    }
  }
  CHECK(temp_klass->IsRetired());
  CHECK_NE(temp_klass.Get(), klass.Get());

  // Populate the two static fields on the final (linked) class object.
  CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
  interfaces_sfield.SetObject<false>(
      klass.Get(),
      soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
  CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
  throws_sfield.SetObject<false>(
      klass.Get(),
      proxied_throws.Get());

  Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);

  // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
  // See also ClassLinker::EnsureInitialized().
  if (kBitstringSubtypeCheckEnabled) {
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
    // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
  }

  VisiblyInitializedCallback* callback = nullptr;
  {
    // Lock on klass is released. Lock new class object.
    ObjectLock<mirror::Class> initialization_lock(self, klass);
    // Conservatively go through the ClassStatus::kInitialized state.
    callback = MarkClassInitialized(self, klass);
  }
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }

  // Consistency checks.
  if (kIsDebugBuild) {
    CHECK(klass->GetIFieldsPtr() == nullptr);
    CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));

    for (size_t i = 0; i < num_virtual_methods; ++i) {
      auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
      CheckProxyMethod(virtual_method, proxied_methods[i]);
    }

    StackHandleScope<1> hs2(self);
    Handle<mirror::String> decoded_name = hs2.NewHandle(soa.Decode<mirror::String>(name));
    std::string interfaces_field_name(StringPrintf("java.lang.Class[] %s.interfaces",
                                                   decoded_name->ToModifiedUtf8().c_str()));
    CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(0)), interfaces_field_name);

    std::string throws_field_name(StringPrintf("java.lang.Class[][] %s.throws",
                                               decoded_name->ToModifiedUtf8().c_str()));
    CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(1)), throws_field_name);

    CHECK_EQ(klass.Get()->GetProxyInterfaces(),
             soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
    CHECK_EQ(klass.Get()->GetProxyThrows(),
             proxied_throws.Get());
  }
  return klass.Get();
}
5483 
CreateProxyConstructor(Handle<mirror::Class> klass,ArtMethod * out)5484 void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod* out) {
5485   // Create constructor for Proxy that must initialize the method.
5486   ObjPtr<mirror::Class> proxy_class = GetClassRoot<mirror::Proxy>(this);
5487   CHECK_EQ(proxy_class->NumDirectMethods(), 21u);
5488 
5489   // Find the <init>(InvocationHandler)V method. The exact method offset varies depending
5490   // on which front-end compiler was used to build the libcore DEX files.
5491   ArtMethod* proxy_constructor = WellKnownClasses::java_lang_reflect_Proxy_init;
5492   DCHECK(proxy_constructor != nullptr)
5493       << "Could not find <init> method in java.lang.reflect.Proxy";
5494 
5495   // Clone the existing constructor of Proxy (our constructor would just invoke it so steal its
5496   // code_ too)
5497   DCHECK(out != nullptr);
5498   out->CopyFrom(proxy_constructor, image_pointer_size_);
5499   // Make this constructor public and fix the class to be our Proxy version.
5500   // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5501   // Note that the compiler calls a ResolveMethod() overload that does not handle a Proxy referrer.
5502   out->SetAccessFlags((out->GetAccessFlags() & ~kAccProtected) |
5503                       kAccPublic |
5504                       kAccCompileDontBother);
5505   out->SetDeclaringClass(klass.Get());
5506 
5507   // Set the original constructor method.
5508   out->SetDataPtrSize(proxy_constructor, image_pointer_size_);
5509 }
5510 
CheckProxyConstructor(ArtMethod * constructor) const5511 void ClassLinker::CheckProxyConstructor(ArtMethod* constructor) const {
5512   CHECK(constructor->IsConstructor());
5513   auto* np = constructor->GetInterfaceMethodIfProxy(image_pointer_size_);
5514   CHECK_STREQ(np->GetName(), "<init>");
5515   CHECK_STREQ(np->GetSignature().ToString().c_str(), "(Ljava/lang/reflect/InvocationHandler;)V");
5516   DCHECK(constructor->IsPublic());
5517 }
5518 
CreateProxyMethod(Handle<mirror::Class> klass,ArtMethod * prototype,ArtMethod * out)5519 void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prototype,
5520                                     ArtMethod* out) {
5521   // We steal everything from the prototype (such as DexCache, invoke stub, etc.) then specialize
5522   // as necessary
5523   DCHECK(out != nullptr);
5524   out->CopyFrom(prototype, image_pointer_size_);
5525 
5526   // Set class to be the concrete proxy class.
5527   out->SetDeclaringClass(klass.Get());
5528   // Clear the abstract and default flags to ensure that defaults aren't picked in
5529   // preference to the invocation handler.
5530   const uint32_t kRemoveFlags = kAccAbstract | kAccDefault;
5531   static_assert((kAccDefault & kAccIntrinsicBits) != 0);
5532   DCHECK(!out->IsIntrinsic()) << "Removing kAccDefault from an intrinsic would be a mistake as it "
5533                               << "overlaps with kAccIntrinsicBits.";
5534   // Make the method final.
5535   // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5536   const uint32_t kAddFlags = kAccFinal | kAccCompileDontBother;
5537   out->SetAccessFlags((out->GetAccessFlags() & ~kRemoveFlags) | kAddFlags);
5538 
5539   // Set the original interface method.
5540   out->SetDataPtrSize(prototype, image_pointer_size_);
5541 
5542   // At runtime the method looks like a reference and argument saving method, clone the code
5543   // related parameters from this method.
5544   out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler());
5545 }
5546 
CheckProxyMethod(ArtMethod * method,ArtMethod * prototype) const5547 void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const {
5548   // Basic consistency checks.
5549   CHECK(!prototype->IsFinal());
5550   CHECK(method->IsFinal());
5551   CHECK(method->IsInvokable());
5552 
5553   // The proxy method doesn't have its own dex cache or dex file and so it steals those of its
5554   // interface prototype. The exception to this are Constructors and the Class of the Proxy itself.
5555   CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
5556   CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
5557 }
5558 
CanWeInitializeClass(ObjPtr<mirror::Class> klass,bool can_init_statics,bool can_init_parents)5559 bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass,
5560                                        bool can_init_statics,
5561                                        bool can_init_parents) {
5562   if (can_init_statics && can_init_parents) {
5563     return true;
5564   }
5565   DCHECK(Runtime::Current()->IsAotCompiler());
5566 
5567   // We currently don't support initializing at AOT time classes that need access
5568   // checks.
5569   if (klass->IsVerifiedNeedsAccessChecks()) {
5570     return false;
5571   }
5572   if (!can_init_statics) {
5573     // Check if there's a class initializer.
5574     ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5575     if (clinit != nullptr) {
5576       return false;
5577     }
5578     // Check if there are encoded static values needing initialization.
5579     if (klass->NumStaticFields() != 0) {
5580       const dex::ClassDef* dex_class_def = klass->GetClassDef();
5581       DCHECK(dex_class_def != nullptr);
5582       if (dex_class_def->static_values_off_ != 0) {
5583         return false;
5584       }
5585     }
5586   }
5587   // If we are a class we need to initialize all interfaces with default methods when we are
5588   // initialized. Check all of them.
5589   if (!klass->IsInterface()) {
5590     size_t num_interfaces = klass->GetIfTableCount();
5591     for (size_t i = 0; i < num_interfaces; i++) {
5592       ObjPtr<mirror::Class> iface = klass->GetIfTable()->GetInterface(i);
5593       if (iface->HasDefaultMethods() && !iface->IsInitialized()) {
5594         if (!can_init_parents || !CanWeInitializeClass(iface, can_init_statics, can_init_parents)) {
5595           return false;
5596         }
5597       }
5598     }
5599   }
5600   if (klass->IsInterface() || !klass->HasSuperClass()) {
5601     return true;
5602   }
5603   ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5604   if (super_class->IsInitialized()) {
5605     return true;
5606   }
5607   return can_init_parents && CanWeInitializeClass(super_class, can_init_statics, can_init_parents);
5608 }
5609 
// Runs the detailed initialization procedure for `klass` (JLS 3rd ed., 12.4.2).
// Returns true if the class is or becomes initialized, or if this thread is
// itself already running the class's initializer higher up the stack.
// Returns false on failure; an exception is then normally pending, except
// when the AOT restrictions (`can_init_statics`, `can_init_parents`) reject
// initialization up front via CanWeInitializeClass().
bool ClassLinker::InitializeClass(Thread* self,
                                  Handle<mirror::Class> klass,
                                  bool can_init_statics,
                                  bool can_init_parents) {
  // see JLS 3rd edition, 12.4.2 "Detailed Initialization Procedure" for the locking protocol

  // Are we already initialized and therefore done?
  // Note: we differ from the JLS here as we don't do this under the lock, this is benign as
  // an initialized class will never change its state.
  if (klass->IsInitialized()) {
    return true;
  }

  // Fast fail if initialization requires a full runtime. Not part of the JLS.
  if (!CanWeInitializeClass(klass.Get(), can_init_statics, can_init_parents)) {
    return false;
  }

  self->AllowThreadSuspension();
  Runtime* const runtime = Runtime::Current();
  const bool stats_enabled = runtime->HasStatsEnabled();
  // Start time of this class's own initialization, for runtime stats; assigned
  // under the lock below, just after we claim kInitializing.
  uint64_t t0;
  {
    ObjectLock<mirror::Class> lock(self, klass);

    // Re-check under the lock in case another thread initialized ahead of us.
    if (klass->IsInitialized()) {
      return true;
    }

    // Was the class already found to be erroneous? Done under the lock to match the JLS.
    if (klass->IsErroneous()) {
      ThrowEarlierClassFailure(klass.Get(), true, /* log= */ true);
      VlogClassInitializationFailure(klass);
      return false;
    }

    CHECK(klass->IsResolved() && !klass->IsErroneousResolved())
        << klass->PrettyClass() << ": state=" << klass->GetStatus();

    if (!klass->IsVerified()) {
      VerifyClass(self, /*verifier_deps= */ nullptr, klass);
      if (!klass->IsVerified()) {
        // We failed to verify, expect either the klass to be erroneous or verification failed at
        // compile time.
        if (klass->IsErroneous()) {
          // The class is erroneous. This may be a verifier error, or another thread attempted
          // verification and/or initialization and failed. We can distinguish those cases by
          // whether an exception is already pending.
          if (self->IsExceptionPending()) {
            // Check that it's a VerifyError.
            DCHECK(IsVerifyError(self->GetException()));
          } else {
            // Check that another thread attempted initialization.
            DCHECK_NE(0, klass->GetClinitThreadId());
            DCHECK_NE(self->GetTid(), klass->GetClinitThreadId());
            // Need to rethrow the previous failure now.
            ThrowEarlierClassFailure(klass.Get(), true);
          }
          VlogClassInitializationFailure(klass);
        } else {
          // Verification deferred to runtime: only possible during AOT
          // compilation, where we report NoClassDefFoundError instead.
          CHECK(Runtime::Current()->IsAotCompiler());
          CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerifiedNeedsAccessChecks());
          self->AssertNoPendingException();
          self->SetException(Runtime::Current()->GetPreAllocatedNoClassDefFoundError());
        }
        self->AssertPendingException();
        return false;
      } else {
        self->AssertNoPendingException();
      }

      // A separate thread could have moved us all the way to initialized. A "simple" example
      // involves a subclass of the current class being initialized at the same time (which
      // will implicitly initialize the superclass, if scheduled that way). b/28254258
      DCHECK(!klass->IsErroneous()) << klass->GetStatus();
      if (klass->IsInitialized()) {
        return true;
      }
    }

    // If the class is ClassStatus::kInitializing, either this thread is
    // initializing higher up the stack or another thread has beat us
    // to initializing and we need to wait. Either way, this
    // invocation of InitializeClass will not be responsible for
    // running <clinit> and will return.
    if (klass->GetStatus() == ClassStatus::kInitializing) {
      // Could have got an exception during verification.
      if (self->IsExceptionPending()) {
        VlogClassInitializationFailure(klass);
        return false;
      }
      // We caught somebody else in the act; was it us?
      if (klass->GetClinitThreadId() == self->GetTid()) {
        // Yes. That's fine. Return so we can continue initializing.
        return true;
      }
      // No. That's fine. Wait for another thread to finish initializing.
      return WaitForInitializeClass(klass, self, lock);
    }

    // Try to get the oat class's status for this class if the oat file is present. The compiler
    // tries to validate superclass descriptors, and writes the result into the oat file.
    // Runtime correctness is guaranteed by classpath checks done on loading. If the classpath
    // is different at runtime than it was at compile time, the oat file is rejected. So if the
    // oat file is present, the classpaths must match, and the runtime time check can be skipped.
    bool has_oat_class = false;
    const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
        ? OatFile::FindOatClass(klass->GetDexFile(), klass->GetDexClassDefIndex(), &has_oat_class)
        : OatFile::OatClass::Invalid();
    if (oat_class.GetStatus() < ClassStatus::kSuperclassValidated &&
        !ValidateSuperClassDescriptors(klass)) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      return false;
    }
    self->AllowThreadSuspension();

    CHECK_EQ(klass->GetStatus(), ClassStatus::kVerified) << klass->PrettyClass()
        << " self.tid=" << self->GetTid() << " clinit.tid=" << klass->GetClinitThreadId();

    // From here out other threads may observe that we're initializing and so changes of state
    // require a notification.
    klass->SetClinitThreadId(self->GetTid());
    mirror::Class::SetStatus(klass, ClassStatus::kInitializing, self);

    t0 = stats_enabled ? NanoTime() : 0u;
  }

  // Time spent initializing superclasses/default interfaces; subtracted from
  // this class's own stats so nested initializations are not double-counted.
  uint64_t t_sub = 0;

  // Initialize super classes, must be done while initializing for the JLS.
  if (!klass->IsInterface() && klass->HasSuperClass()) {
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (!super_class->IsInitialized()) {
      CHECK(!super_class->IsInterface());
      CHECK(can_init_parents);
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
      uint64_t super_t0 = stats_enabled ? NanoTime() : 0u;
      bool super_initialized = InitializeClass(self, handle_scope_super, can_init_statics, true);
      uint64_t super_t1 = stats_enabled ? NanoTime() : 0u;
      if (!super_initialized) {
        // The super class was verified ahead of entering initializing, we should only be here if
        // the super class became erroneous due to initialization.
        // For the case of aot compiler, the super class might also be initializing but we don't
        // want to process circular dependencies in pre-compile.
        CHECK(self->IsExceptionPending())
            << "Super class initialization failed for "
            << handle_scope_super->PrettyDescriptor()
            << " that has unexpected status " << handle_scope_super->GetStatus()
            << "\nPending exception:\n"
            << (self->GetException() != nullptr ? self->GetException()->Dump() : "");
        ObjectLock<mirror::Class> lock(self, klass);
        // Initialization failed because the super-class is erroneous.
        mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
        return false;
      }
      t_sub = super_t1 - super_t0;
    }
  }

  if (!klass->IsInterface()) {
    // Initialize interfaces with default methods for the JLS.
    size_t num_direct_interfaces = klass->NumDirectInterfaces();
    // Only setup the (expensive) handle scope if we actually need to.
    if (UNLIKELY(num_direct_interfaces > 0)) {
      StackHandleScope<1> hs_iface(self);
      MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
      for (size_t i = 0; i < num_direct_interfaces; i++) {
        handle_scope_iface.Assign(klass->GetDirectInterface(i));
        CHECK(handle_scope_iface != nullptr) << klass->PrettyDescriptor() << " iface #" << i;
        CHECK(handle_scope_iface->IsInterface());
        if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
          // We have already done this for this interface. Skip it.
          continue;
        }
        // We cannot just call initialize class directly because we need to ensure that ALL
        // interfaces with default methods are initialized. Non-default interface initialization
        // will not affect other non-default super-interfaces.
        // This is not very precise, misses all walking.
        uint64_t inf_t0 = stats_enabled ? NanoTime() : 0u;
        bool iface_initialized = InitializeDefaultInterfaceRecursive(self,
                                                                     handle_scope_iface,
                                                                     can_init_statics,
                                                                     can_init_parents);
        uint64_t inf_t1 = stats_enabled ? NanoTime() : 0u;
        if (!iface_initialized) {
          ObjectLock<mirror::Class> lock(self, klass);
          // Initialization failed because one of our interfaces with default methods is erroneous.
          mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
          return false;
        }
        t_sub += inf_t1 - inf_t0;
      }
    }
  }

  // Apply encoded static field values from the dex file, if any.
  const size_t num_static_fields = klass->NumStaticFields();
  if (num_static_fields > 0) {
    const dex::ClassDef* dex_class_def = klass->GetClassDef();
    CHECK(dex_class_def != nullptr);
    StackHandleScope<3> hs(self);
    Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
    Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));

    // Eagerly fill in static fields so that we don't have to do as many expensive
    // Class::FindStaticField in ResolveField.
    for (size_t i = 0; i < num_static_fields; ++i) {
      ArtField* field = klass->GetStaticField(i);
      const uint32_t field_idx = field->GetDexFieldIndex();
      ArtField* resolved_field = dex_cache->GetResolvedField(field_idx);
      if (resolved_field == nullptr) {
        // Populating cache of a dex file which defines `klass` should always be allowed.
        DCHECK(!hiddenapi::ShouldDenyAccessToMember(
            field,
            hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
            hiddenapi::AccessMethod::kNone));
        dex_cache->SetResolvedField(field_idx, field);
      } else {
        DCHECK_EQ(field, resolved_field);
      }
    }

    annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
                                                                 class_loader,
                                                                 this,
                                                                 *dex_class_def);
    const DexFile& dex_file = *dex_cache->GetDexFile();

    if (value_it.HasNext()) {
      ClassAccessor accessor(dex_file, *dex_class_def);
      CHECK(can_init_statics);
      // Static fields and encoded values are stored in the same order, so walk
      // both sequences in lock step.
      for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
        if (!value_it.HasNext()) {
          break;
        }
        ArtField* art_field = ResolveField(field.GetIndex(),
                                           dex_cache,
                                           class_loader,
                                           /* is_static= */ true);
        if (Runtime::Current()->IsActiveTransaction()) {
          value_it.ReadValueToField<true>(art_field);
        } else {
          value_it.ReadValueToField<false>(art_field);
        }
        if (self->IsExceptionPending()) {
          break;
        }
        value_it.Next();
      }
      DCHECK(self->IsExceptionPending() || !value_it.HasNext());
    }
  }


  // Run <clinit> unless a static-value failure already left an exception pending.
  if (!self->IsExceptionPending()) {
    ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
    if (clinit != nullptr) {
      CHECK(can_init_statics);
      JValue result;
      clinit->Invoke(self, nullptr, 0, &result, "V");
    }
  }
  self->AllowThreadSuspension();
  uint64_t t1 = stats_enabled ? NanoTime() : 0u;

  VisiblyInitializedCallback* callback = nullptr;
  bool success = true;
  {
    ObjectLock<mirror::Class> lock(self, klass);

    if (self->IsExceptionPending()) {
      WrapExceptionInInitializer(klass);
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      success = false;
    } else if (Runtime::Current()->IsActiveTransaction() && IsTransactionAborted()) {
      // The exception thrown when the transaction aborted has been caught and cleared
      // so we need to throw it again now.
      VLOG(compiler) << "Return from class initializer of "
                     << mirror::Class::PrettyDescriptor(klass.Get())
                     << " without exception while transaction was aborted: re-throw it now.";
      ThrowTransactionAbortError(self);
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      success = false;
    } else {
      if (stats_enabled) {
        RuntimeStats* global_stats = runtime->GetStats();
        RuntimeStats* thread_stats = self->GetStats();
        ++global_stats->class_init_count;
        ++thread_stats->class_init_count;
        global_stats->class_init_time_ns += (t1 - t0 - t_sub);
        thread_stats->class_init_time_ns += (t1 - t0 - t_sub);
      }
      // Set the class as initialized except if failed to initialize static fields.
      callback = MarkClassInitialized(self, klass);
      if (VLOG_IS_ON(class_linker)) {
        std::string temp;
        LOG(INFO) << "Initialized class " << klass->GetDescriptor(&temp) << " from " <<
            klass->GetLocation();
      }
    }
  }
  // Publish visible initialization outside the class lock.
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  return success;
}
5917 
5918 // We recursively run down the tree of interfaces. We need to do this in the order they are declared
5919 // and perform the initialization only on those interfaces that contain default methods.
InitializeDefaultInterfaceRecursive(Thread * self,Handle<mirror::Class> iface,bool can_init_statics,bool can_init_parents)5920 bool ClassLinker::InitializeDefaultInterfaceRecursive(Thread* self,
5921                                                       Handle<mirror::Class> iface,
5922                                                       bool can_init_statics,
5923                                                       bool can_init_parents) {
5924   CHECK(iface->IsInterface());
5925   size_t num_direct_ifaces = iface->NumDirectInterfaces();
5926   // Only create the (expensive) handle scope if we need it.
5927   if (UNLIKELY(num_direct_ifaces > 0)) {
5928     StackHandleScope<1> hs(self);
5929     MutableHandle<mirror::Class> handle_super_iface(hs.NewHandle<mirror::Class>(nullptr));
5930     // First we initialize all of iface's super-interfaces recursively.
5931     for (size_t i = 0; i < num_direct_ifaces; i++) {
5932       ObjPtr<mirror::Class> super_iface = iface->GetDirectInterface(i);
5933       CHECK(super_iface != nullptr) << iface->PrettyDescriptor() << " iface #" << i;
5934       if (!super_iface->HasBeenRecursivelyInitialized()) {
5935         // Recursive step
5936         handle_super_iface.Assign(super_iface);
5937         if (!InitializeDefaultInterfaceRecursive(self,
5938                                                  handle_super_iface,
5939                                                  can_init_statics,
5940                                                  can_init_parents)) {
5941           return false;
5942         }
5943       }
5944     }
5945   }
5946 
5947   bool result = true;
5948   // Then we initialize 'iface' if it has default methods. We do not need to (and in fact must not)
5949   // initialize if we don't have default methods.
5950   if (iface->HasDefaultMethods()) {
5951     result = EnsureInitialized(self, iface, can_init_statics, can_init_parents);
5952   }
5953 
5954   // Mark that this interface has undergone recursive default interface initialization so we know we
5955   // can skip it on any later class initializations. We do this even if we are not a default
5956   // interface since we can still avoid the traversal. This is purely a performance optimization.
5957   if (result) {
5958     // TODO This should be done in a better way
5959     // Note: Use a try-lock to avoid blocking when someone else is holding the lock on this
5960     //       interface. It is bad (Java) style, but not impossible. Marking the recursive
5961     //       initialization is a performance optimization (to avoid another idempotent visit
5962     //       for other implementing classes/interfaces), and can be revisited later.
5963     ObjectTryLock<mirror::Class> lock(self, iface);
5964     if (lock.Acquired()) {
5965       iface->SetRecursivelyInitialized();
5966     }
5967   }
5968   return result;
5969 }
5970 
WaitForInitializeClass(Handle<mirror::Class> klass,Thread * self,ObjectLock<mirror::Class> & lock)5971 bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass,
5972                                          Thread* self,
5973                                          ObjectLock<mirror::Class>& lock)
5974     REQUIRES_SHARED(Locks::mutator_lock_) {
5975   while (true) {
5976     self->AssertNoPendingException();
5977     CHECK(!klass->IsInitialized());
5978     lock.WaitIgnoringInterrupts();
5979 
5980     // When we wake up, repeat the test for init-in-progress.  If
5981     // there's an exception pending (only possible if
5982     // we were not using WaitIgnoringInterrupts), bail out.
5983     if (self->IsExceptionPending()) {
5984       WrapExceptionInInitializer(klass);
5985       mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5986       return false;
5987     }
5988     // Spurious wakeup? Go back to waiting.
5989     if (klass->GetStatus() == ClassStatus::kInitializing) {
5990       continue;
5991     }
5992     if (klass->GetStatus() == ClassStatus::kVerified &&
5993         Runtime::Current()->IsAotCompiler()) {
5994       // Compile time initialization failed.
5995       return false;
5996     }
5997     if (klass->IsErroneous()) {
5998       // The caller wants an exception, but it was thrown in a
5999       // different thread.  Synthesize one here.
6000       ThrowNoClassDefFoundError("<clinit> failed for class %s; see exception in other thread",
6001                                 klass->PrettyDescriptor().c_str());
6002       VlogClassInitializationFailure(klass);
6003       return false;
6004     }
6005     if (klass->IsInitialized()) {
6006       return true;
6007     }
6008     LOG(FATAL) << "Unexpected class status. " << klass->PrettyClass() << " is "
6009         << klass->GetStatus();
6010   }
6011   UNREACHABLE();
6012 }
6013 
ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,ArtMethod * m)6014 static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,
6015                                                           Handle<mirror::Class> super_klass,
6016                                                           ArtMethod* method,
6017                                                           ArtMethod* m)
6018     REQUIRES_SHARED(Locks::mutator_lock_) {
6019   DCHECK(Thread::Current()->IsExceptionPending());
6020   DCHECK(!m->IsProxyMethod());
6021   const DexFile* dex_file = m->GetDexFile();
6022   const dex::MethodId& method_id = dex_file->GetMethodId(m->GetDexMethodIndex());
6023   const dex::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
6024   dex::TypeIndex return_type_idx = proto_id.return_type_idx_;
6025   std::string return_type = dex_file->PrettyType(return_type_idx);
6026   std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
6027   ThrowWrappedLinkageError(klass.Get(),
6028                            "While checking class %s method %s signature against %s %s: "
6029                            "Failed to resolve return type %s with %s",
6030                            mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
6031                            ArtMethod::PrettyMethod(method).c_str(),
6032                            super_klass->IsInterface() ? "interface" : "superclass",
6033                            mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
6034                            return_type.c_str(), class_loader.c_str());
6035 }
6036 
ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,ArtMethod * m,uint32_t index,dex::TypeIndex arg_type_idx)6037 static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,
6038                                                    Handle<mirror::Class> super_klass,
6039                                                    ArtMethod* method,
6040                                                    ArtMethod* m,
6041                                                    uint32_t index,
6042                                                    dex::TypeIndex arg_type_idx)
6043     REQUIRES_SHARED(Locks::mutator_lock_) {
6044   DCHECK(Thread::Current()->IsExceptionPending());
6045   DCHECK(!m->IsProxyMethod());
6046   const DexFile* dex_file = m->GetDexFile();
6047   std::string arg_type = dex_file->PrettyType(arg_type_idx);
6048   std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
6049   ThrowWrappedLinkageError(klass.Get(),
6050                            "While checking class %s method %s signature against %s %s: "
6051                            "Failed to resolve arg %u type %s with %s",
6052                            mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
6053                            ArtMethod::PrettyMethod(method).c_str(),
6054                            super_klass->IsInterface() ? "interface" : "superclass",
6055                            mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
6056                            index, arg_type.c_str(), class_loader.c_str());
6057 }
6058 
ThrowSignatureMismatch(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,const std::string & error_msg)6059 static void ThrowSignatureMismatch(Handle<mirror::Class> klass,
6060                                    Handle<mirror::Class> super_klass,
6061                                    ArtMethod* method,
6062                                    const std::string& error_msg)
6063     REQUIRES_SHARED(Locks::mutator_lock_) {
6064   ThrowLinkageError(klass.Get(),
6065                     "Class %s method %s resolves differently in %s %s: %s",
6066                     mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
6067                     ArtMethod::PrettyMethod(method).c_str(),
6068                     super_klass->IsInterface() ? "interface" : "superclass",
6069                     mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
6070                     error_msg.c_str());
6071 }
6072 
// Checks that `method1` (from `klass`) and `method2` (the corresponding method
// in `super_klass`, loaded by a different class loader) resolve their return
// type and every parameter type to the same classes. On any resolution failure
// or type mismatch, throws the appropriate error (wrapped resolution failure
// or LinkageError) and returns false.
static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
                                                      Handle<mirror::Class> klass,
                                                      Handle<mirror::Class> super_klass,
                                                      ArtMethod* method1,
                                                      ArtMethod* method2)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  {
    // Compare resolved return types. `return_type` is handleized because the
    // second ResolveReturnType() call may allocate and trigger GC.
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> return_type(hs.NewHandle(method1->ResolveReturnType()));
    if (UNLIKELY(return_type == nullptr)) {
      ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
      return false;
    }
    ObjPtr<mirror::Class> other_return_type = method2->ResolveReturnType();
    if (UNLIKELY(other_return_type == nullptr)) {
      ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2);
      return false;
    }
    if (UNLIKELY(other_return_type != return_type.Get())) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Return types mismatch: %s(%p) vs %s(%p)",
                                          return_type->PrettyClassAndClassLoader().c_str(),
                                          return_type.Get(),
                                          other_return_type->PrettyClassAndClassLoader().c_str(),
                                          other_return_type.Ptr()));
      return false;
    }
  }
  // Compare parameter lists. A null type list is equivalent to an empty one,
  // so null-vs-empty combinations are accepted below.
  const dex::TypeList* types1 = method1->GetParameterTypeList();
  const dex::TypeList* types2 = method2->GetParameterTypeList();
  if (types1 == nullptr) {
    if (types2 != nullptr && types2->Size() != 0) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Type list mismatch with %s",
                                          method2->PrettyMethod(true).c_str()));
      return false;
    }
    return true;
  } else if (UNLIKELY(types2 == nullptr)) {
    if (types1->Size() != 0) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Type list mismatch with %s",
                                          method2->PrettyMethod(true).c_str()));
      return false;
    }
    return true;
  }
  uint32_t num_types = types1->Size();
  if (UNLIKELY(num_types != types2->Size())) {
    ThrowSignatureMismatch(klass, super_klass, method1,
                           StringPrintf("Type list mismatch with %s",
                                        method2->PrettyMethod(true).c_str()));
    return false;
  }
  // Resolve and compare each parameter type pairwise. As with the return type,
  // the first resolution per iteration is handleized across the second one.
  for (uint32_t i = 0; i < num_types; ++i) {
    StackHandleScope<1> hs(self);
    dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
    Handle<mirror::Class> param_type(hs.NewHandle(
        method1->ResolveClassFromTypeIndex(param_type_idx)));
    if (UNLIKELY(param_type == nullptr)) {
      ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
                                             method1, i, param_type_idx);
      return false;
    }
    dex::TypeIndex other_param_type_idx = types2->GetTypeItem(i).type_idx_;
    ObjPtr<mirror::Class> other_param_type =
        method2->ResolveClassFromTypeIndex(other_param_type_idx);
    if (UNLIKELY(other_param_type == nullptr)) {
      ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
                                             method2, i, other_param_type_idx);
      return false;
    }
    if (UNLIKELY(param_type.Get() != other_param_type)) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Parameter %u type mismatch: %s(%p) vs %s(%p)",
                                          i,
                                          param_type->PrettyClassAndClassLoader().c_str(),
                                          param_type.Get(),
                                          other_param_type->PrettyClassAndClassLoader().c_str(),
                                          other_param_type.Ptr()));
      return false;
    }
  }
  return true;
}
6158 
6159 
// Verifies that every vtable/interface method `klass` shares with a superclass
// or interface loaded by a *different* class loader resolves its signature to
// the same types in both contexts. Superclasses/interfaces from the same
// loader are skipped (they trivially agree). Returns false with a pending
// exception on a mismatch or resolution failure.
bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) {
  if (klass->IsInterface()) {
    return true;
  }
  // Begin with the methods local to the superclass.
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(nullptr));
  if (klass->HasSuperClass() &&
      klass->GetClassLoader() != klass->GetSuperClass()->GetClassLoader()) {
    super_klass.Assign(klass->GetSuperClass());
    for (int i = klass->GetSuperClass()->GetVTableLength() - 1; i >= 0; --i) {
      auto* m = klass->GetVTableEntry(i, image_pointer_size_);
      auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_);
      // Only overridden entries need re-checking; identical entries agree.
      if (m != super_m) {
        if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
                                                                klass,
                                                                super_klass,
                                                                m,
                                                                super_m))) {
          self->AssertPendingException();
          return false;
        }
      }
    }
  }
  // Then check each interface's methods against this class's implementations.
  for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
    super_klass.Assign(klass->GetIfTable()->GetInterface(i));
    if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
      uint32_t num_methods = super_klass->NumVirtualMethods();
      for (uint32_t j = 0; j < num_methods; ++j) {
        auto* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
            j, image_pointer_size_);
        auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_);
        if (m != super_m) {
          if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
                                                                  klass,
                                                                  super_klass,
                                                                  m,
                                                                  super_m))) {
            self->AssertPendingException();
            return false;
          }
        }
      }
    }
  }
  return true;
}
6209 
// Ensures `c` is initialized, running class initialization if necessary and
// permitted. `can_init_fields`/`can_init_parents` gate how much work may be
// done; when either is false, a failed attempt clears any pending exception
// (the caller is expected to retry later with initialization fully allowed).
// Returns true if `c` is initialized on return.
bool ClassLinker::EnsureInitialized(Thread* self,
                                    Handle<mirror::Class> c,
                                    bool can_init_fields,
                                    bool can_init_parents) {
  DCHECK(c != nullptr);

  if (c->IsInitialized()) {
    // If we've seen an initialized but not visibly initialized class
    // many times, request visible initialization.
    if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
      // Thanks to the x86 memory model classes skip the initialized status.
      DCHECK(c->IsVisiblyInitialized());
    } else if (UNLIKELY(!c->IsVisiblyInitialized())) {
      if (self->IncrementMakeVisiblyInitializedCounter()) {
        MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
      }
    }
    return true;
  }
  // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
  //
  // Ensure the bitstring is initialized before any of the class initialization
  // logic occurs. Once a class initializer starts running, objects can
  // escape into the heap and use the subtype checking code.
  //
  // Note: A class whose SubtypeCheckInfo is at least Initialized means it
  // can be used as a source for the IsSubClass check, and that all ancestors
  // of the class are Assigned (can be used as a target for IsSubClass check)
  // or Overflowed (can be used as a source for IsSubClass check).
  if (kBitstringSubtypeCheckEnabled) {
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(c.Get());
    // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck is already initialized.
  }
  const bool success = InitializeClass(self, c, can_init_fields, can_init_parents);
  if (!success) {
    if (can_init_fields && can_init_parents) {
      CHECK(self->IsExceptionPending()) << c->PrettyClass();
    } else {
      // There may or may not be an exception pending. If there is, clear it.
      // We propagate the exception only if we can initialize fields and parents.
      self->ClearException();
    }
  } else {
    self->AssertNoPendingException();
  }
  return success;
}
6258 
FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,ObjPtr<mirror::Class> new_class)6259 void ClassLinker::FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,
6260                                                ObjPtr<mirror::Class> new_class) {
6261   DCHECK_EQ(temp_class->NumInstanceFields(), 0u);
6262   for (ArtField& field : new_class->GetIFields()) {
6263     if (field.GetDeclaringClass() == temp_class) {
6264       field.SetDeclaringClass(new_class);
6265     }
6266   }
6267 
6268   DCHECK_EQ(temp_class->NumStaticFields(), 0u);
6269   for (ArtField& field : new_class->GetSFields()) {
6270     if (field.GetDeclaringClass() == temp_class) {
6271       field.SetDeclaringClass(new_class);
6272     }
6273   }
6274 
6275   DCHECK_EQ(temp_class->NumDirectMethods(), 0u);
6276   DCHECK_EQ(temp_class->NumVirtualMethods(), 0u);
6277   for (auto& method : new_class->GetMethods(image_pointer_size_)) {
6278     if (method.GetDeclaringClass() == temp_class) {
6279       method.SetDeclaringClass(new_class);
6280     }
6281   }
6282 
6283   // Make sure the remembered set and mod-union tables know that we updated some of the native
6284   // roots.
6285   WriteBarrier::ForEveryFieldWrite(new_class);
6286 }
6287 
RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader)6288 void ClassLinker::RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6289   CHECK(class_loader->GetAllocator() == nullptr);
6290   CHECK(class_loader->GetClassTable() == nullptr);
6291   Thread* const self = Thread::Current();
6292   ClassLoaderData data;
6293   data.weak_root = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, class_loader);
6294   // Create and set the class table.
6295   data.class_table = new ClassTable;
6296   class_loader->SetClassTable(data.class_table);
6297   // Create and set the linear allocator.
6298   data.allocator = Runtime::Current()->CreateLinearAlloc();
6299   class_loader->SetAllocator(data.allocator);
6300   // Add to the list so that we know to free the data later.
6301   class_loaders_.push_back(data);
6302 }
6303 
InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)6304 ClassTable* ClassLinker::InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6305   if (class_loader == nullptr) {
6306     return boot_class_table_.get();
6307   }
6308   ClassTable* class_table = class_loader->GetClassTable();
6309   if (class_table == nullptr) {
6310     RegisterClassLoader(class_loader);
6311     class_table = class_loader->GetClassTable();
6312     DCHECK(class_table != nullptr);
6313   }
6314   return class_table;
6315 }
6316 
ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader)6317 ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6318   return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
6319 }
6320 
// Links `klass`: wires up its superclass, methods/vtable/IMT, and field
// layout, then transitions it to ClassStatus::kResolved. If the class was
// loaded into a temporary (wrongly sized) Class object, a correctly sized copy
// is created, the temporary is retired, and `*h_new_class_out` receives the
// replacement; otherwise it receives `klass` itself. Returns false (with a
// pending exception) on failure.
bool ClassLinker::LinkClass(Thread* self,
                            const char* descriptor,
                            Handle<mirror::Class> klass,
                            Handle<mirror::ObjectArray<mirror::Class>> interfaces,
                            MutableHandle<mirror::Class>* h_new_class_out) {
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  if (!LinkSuperClass(klass)) {
    return false;
  }
  // Scratch IMT, pre-filled with the unimplemented marker; LinkMethods fills
  // in real entries and reports conflicts.
  ArtMethod* imt_data[ImTable::kSize];
  // If there are any new conflicts compared to super class.
  bool new_conflict = false;
  std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod());
  if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) {
    return false;
  }
  if (!LinkInstanceFields(self, klass)) {
    return false;
  }
  size_t class_size;
  if (!LinkStaticFields(self, klass, &class_size)) {
    return false;
  }
  class_size =
      mirror::Class::AdjustClassSizeForReferenceOffsetBitmapDuringLinking(klass.Get(), class_size);
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  ImTable* imt = nullptr;
  if (klass->ShouldHaveImt()) {
    // If there are any new conflicts compared to the super class we can not make a copy. There
    // can be cases where both will have a conflict method at the same slot without having the same
    // set of conflicts. In this case, we can not share the IMT since the conflict table slow path
    // will possibly create a table that is incorrect for either of the classes.
    // Same IMT with new_conflict does not happen very often.
    if (!new_conflict) {
      ImTable* super_imt = klass->FindSuperImt(image_pointer_size_);
      if (super_imt != nullptr) {
        // Share the super class IMT only if it is entry-for-entry identical.
        bool imt_equals = true;
        for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
          imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]);
        }
        if (imt_equals) {
          imt = super_imt;
        }
      }
    }
    if (imt == nullptr) {
      // No shareable IMT: allocate a fresh one in the loader's linear alloc.
      LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
      imt = reinterpret_cast<ImTable*>(
          allocator->Alloc(self,
                           ImTable::SizeInBytes(image_pointer_size_),
                           LinearAllocKind::kNoGCRoots));
      if (imt == nullptr) {
        return false;
      }
      imt->Populate(imt_data, image_pointer_size_);
    }
  }

  if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
    // We don't need to retire this class as it has no embedded tables or it was created the
    // correct size during class linker initialization.
    CHECK_EQ(klass->GetClassSize(), class_size) << klass->PrettyDescriptor();

    if (klass->ShouldHaveEmbeddedVTable()) {
      klass->PopulateEmbeddedVTable(image_pointer_size_);
      klass->PopulateReferenceOffsetBitmap();
    }
    if (klass->ShouldHaveImt()) {
      klass->SetImt(imt, image_pointer_size_);
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(klass);
    }

    // This will notify waiters on klass that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kResolved, self);
    h_new_class_out->Assign(klass.Get());
  } else {
    CHECK(!klass->IsResolved());
    // Retire the temporary class and create the correctly sized resolved class.
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_new_class =
        hs.NewHandle(mirror::Class::CopyOf(klass, self, class_size, imt, image_pointer_size_));
    // Set arrays to null since we don't want to have multiple classes with the same ArtField or
    // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
    // may not see any references to the target space and clean the card for a class if another
    // class had the same array pointer.
    klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
    klass->SetSFieldsPtrUnchecked(nullptr);
    klass->SetIFieldsPtrUnchecked(nullptr);
    if (UNLIKELY(h_new_class == nullptr)) {
      self->AssertPendingOOMException();
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
      return false;
    }

    CHECK_EQ(h_new_class->GetClassSize(), class_size);
    ObjectLock<mirror::Class> lock(self, h_new_class);
    FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());

    if (LIKELY(descriptor != nullptr)) {
      // Swap the temporary class for the new one in the class table.
      WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
      const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
      ClassTable* const table = InsertClassTableForClassLoader(class_loader);
      const ObjPtr<mirror::Class> existing =
          table->UpdateClass(h_new_class.Get(), ComputeModifiedUtf8Hash(descriptor));
      CHECK_EQ(existing, klass.Get());
      WriteBarrierOnClassLoaderLocked(class_loader, h_new_class.Get());
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(h_new_class);
    }

    // This will notify waiters on temp class that saw the not yet resolved class in the
    // class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kRetired, self);

    CHECK_EQ(h_new_class->GetStatus(), ClassStatus::kResolving);
    // This will notify waiters on new_class that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(h_new_class, ClassStatus::kResolved, self);
    // Return the new class.
    h_new_class_out->Assign(h_new_class.Get());
  }
  return true;
}
6458 
// Resolves the superclass and directly implemented interfaces of `klass` from
// its class def in `dex_file`, performing access checks and cheap direct
// self-reference cycle detection, then marks the class ClassStatus::kLoaded.
// Returns false with a pending exception on failure.
bool ClassLinker::LoadSuperAndInterfaces(Handle<mirror::Class> klass, const DexFile& dex_file) {
  CHECK_EQ(ClassStatus::kIdx, klass->GetStatus());
  const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
  dex::TypeIndex super_class_idx = class_def.superclass_idx_;
  if (super_class_idx.IsValid()) {
    // Check that a class does not inherit from itself directly.
    //
    // TODO: This is a cheap check to detect the straightforward case
    // of a class extending itself (b/28685551), but we should do a
    // proper cycle detection on loaded classes, to detect all cases
    // of class circularity errors (b/28830038).
    if (super_class_idx == class_def.class_idx_) {
      ThrowClassCircularityError(klass.Get(),
                                 "Class %s extends itself",
                                 klass->PrettyDescriptor().c_str());
      return false;
    }

    ObjPtr<mirror::Class> super_class = ResolveType(super_class_idx, klass.Get());
    if (super_class == nullptr) {
      DCHECK(Thread::Current()->IsExceptionPending());
      return false;
    }
    // Verify
    if (!klass->CanAccess(super_class)) {
      ThrowIllegalAccessError(klass.Get(), "Class %s extended by class %s is inaccessible",
                              super_class->PrettyDescriptor().c_str(),
                              klass->PrettyDescriptor().c_str());
      return false;
    }
    CHECK(super_class->IsResolved());
    klass->SetSuperClass(super_class);
  }
  const dex::TypeList* interfaces = dex_file.GetInterfacesList(class_def);
  if (interfaces != nullptr) {
    for (size_t i = 0; i < interfaces->Size(); i++) {
      dex::TypeIndex idx = interfaces->GetTypeItem(i).type_idx_;
      if (idx.IsValid()) {
        // Check that a class does not implement itself directly.
        //
        // TODO: This is a cheap check to detect the straightforward case of a class implementing
        // itself, but we should do a proper cycle detection on loaded classes, to detect all cases
        // of class circularity errors. See b/28685551, b/28830038, and b/301108855
        if (idx == class_def.class_idx_) {
          ThrowClassCircularityError(
              klass.Get(), "Class %s implements itself", klass->PrettyDescriptor().c_str());
          return false;
        }
      }

      ObjPtr<mirror::Class> interface = ResolveType(idx, klass.Get());
      if (interface == nullptr) {
        DCHECK(Thread::Current()->IsExceptionPending());
        return false;
      }
      // Verify
      if (!klass->CanAccess(interface)) {
        // TODO: the RI seemed to ignore this in my testing.
        ThrowIllegalAccessError(klass.Get(),
                                "Interface %s implemented by class %s is inaccessible",
                                interface->PrettyDescriptor().c_str(),
                                klass->PrettyDescriptor().c_str());
        return false;
      }
    }
  }
  // Mark the class as loaded.
  mirror::Class::SetStatus(klass, ClassStatus::kLoaded, nullptr);
  return true;
}
6529 
// Validates the superclass relationship for `klass` (existence, accessibility,
// not final, not an interface, record-class rules) and inherits relevant
// flags from the superclass (finalizable, class-loader, reference flags).
// Returns false with a pending exception on any violation.
bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
  CHECK(!klass->IsPrimitive());
  ObjPtr<mirror::Class> super = klass->GetSuperClass();
  ObjPtr<mirror::Class> object_class = GetClassRoot<mirror::Object>(this);
  // java.lang.Object is the only class allowed to have no superclass.
  if (klass.Get() == object_class) {
    if (super != nullptr) {
      ThrowClassFormatError(klass.Get(), "java.lang.Object must not have a superclass");
      return false;
    }
    return true;
  }
  if (super == nullptr) {
    ThrowLinkageError(klass.Get(), "No superclass defined for class %s",
                      klass->PrettyDescriptor().c_str());
    return false;
  }
  // Verify
  if (klass->IsInterface() && super != object_class) {
    ThrowClassFormatError(klass.Get(), "Interfaces must have java.lang.Object as superclass");
    return false;
  }
  if (super->IsFinal()) {
    ThrowVerifyError(klass.Get(),
                     "Superclass %s of %s is declared final",
                     super->PrettyDescriptor().c_str(),
                     klass->PrettyDescriptor().c_str());
    return false;
  }
  if (super->IsInterface()) {
    ThrowIncompatibleClassChangeError(klass.Get(),
                                      "Superclass %s of %s is an interface",
                                      super->PrettyDescriptor().c_str(),
                                      klass->PrettyDescriptor().c_str());
    return false;
  }
  if (!klass->CanAccess(super)) {
    ThrowIllegalAccessError(klass.Get(), "Superclass %s is inaccessible to class %s",
                            super->PrettyDescriptor().c_str(),
                            klass->PrettyDescriptor().c_str());
    return false;
  }
  if (!VerifyRecordClass(klass, super)) {
    DCHECK(Thread::Current()->IsExceptionPending());
    return false;
  }

  // Inherit kAccClassIsFinalizable from the superclass in case this
  // class doesn't override finalize.
  if (super->IsFinalizable()) {
    klass->SetFinalizable();
  }

  // Inherit class loader flag from super class.
  if (super->IsClassLoaderClass()) {
    klass->SetClassLoaderClass();
  }

  // Inherit reference flags (if any) from the superclass.
  uint32_t reference_flags = (super->GetClassFlags() & mirror::kClassFlagReference);
  if (reference_flags != 0) {
    CHECK_EQ(klass->GetClassFlags(), 0u);
    klass->SetClassFlags(klass->GetClassFlags() | reference_flags);
  }
  // Disallow custom direct subclasses of java.lang.ref.Reference.
  if (init_done_ && super == GetClassRoot<mirror::Reference>(this)) {
    ThrowLinkageError(klass.Get(),
                      "Class %s attempts to subclass java.lang.ref.Reference, which is not allowed",
                      klass->PrettyDescriptor().c_str());
    return false;
  }

  if (kIsDebugBuild) {
    // Ensure super classes are fully resolved prior to resolving fields.
    while (super != nullptr) {
      CHECK(super->IsResolved());
      super = super->GetSuperClass();
    }
  }
  return true;
}
6610 
6611 // Comparator for name and signature of a method, used in finding overriding methods. Implementation
6612 // avoids the use of handles, if it didn't then rather than compare dex files we could compare dex
6613 // caches in the implementation below.
6614 class MethodNameAndSignatureComparator final : public ValueObject {
6615  public:
6616   explicit MethodNameAndSignatureComparator(ArtMethod* method)
REQUIRES_SHARED(Locks::mutator_lock_)6617       REQUIRES_SHARED(Locks::mutator_lock_) :
6618       dex_file_(method->GetDexFile()), mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())),
6619       name_view_() {
6620     DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
6621   }
6622 
GetNameView()6623   ALWAYS_INLINE std::string_view GetNameView() {
6624     if (name_view_.empty()) {
6625       name_view_ = dex_file_->GetStringView(mid_->name_idx_);
6626     }
6627     return name_view_;
6628   }
6629 
HasSameNameAndSignature(ArtMethod * other)6630   bool HasSameNameAndSignature(ArtMethod* other)
6631       REQUIRES_SHARED(Locks::mutator_lock_) {
6632     DCHECK(!other->IsProxyMethod()) << other->PrettyMethod();
6633     const DexFile* other_dex_file = other->GetDexFile();
6634     const dex::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex());
6635     if (dex_file_ == other_dex_file) {
6636       return mid_->name_idx_ == other_mid.name_idx_ && mid_->proto_idx_ == other_mid.proto_idx_;
6637     }
6638     return GetNameView() == other_dex_file->GetStringView(other_mid.name_idx_) &&
6639            dex_file_->GetMethodSignature(*mid_) == other_dex_file->GetMethodSignature(other_mid);
6640   }
6641 
6642  private:
6643   // Dex file for the method to compare against.
6644   const DexFile* const dex_file_;
6645   // MethodId for the method to compare against.
6646   const dex::MethodId* const mid_;
6647   // Lazily computed name from the dex file's strings.
6648   std::string_view name_view_;
6649 };
6650 
// Returns the class that owns the IMT `klass` uses, i.e. the highest class in
// the superclass chain whose IMT pointer differs from its own superclass's
// (abstract superclasses, which have no IMT, are skipped). Returns null if
// the same IMT is shared all the way up past the last class with a superclass.
static ObjPtr<mirror::Class> GetImtOwner(ObjPtr<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ImTable* imt = klass->GetImt(kRuntimePointerSize);
  DCHECK(imt != nullptr);
  while (klass->HasSuperClass()) {
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    // Abstract classes cannot have IMTs, so we skip them.
    while (super_class->IsAbstract()) {
      DCHECK(super_class->HasSuperClass());
      super_class = super_class->GetSuperClass();
    }
    DCHECK(super_class->ShouldHaveImt());
    if (imt != super_class->GetImt(kRuntimePointerSize)) {
      // IMT not shared with the super class, return the current class.
      DCHECK_EQ(klass->GetImt(kRuntimePointerSize), imt) << klass->PrettyClass();
      return klass;
    }
    klass = super_class;
  }
  return nullptr;
}
6672 
// Grows the IMT conflict table of `conflict_method` by one
// (interface_method, method) entry for `klass`. A new, larger table is
// allocated in the IMT owner's LinearAlloc (the old table is deliberately
// leaked; see comment below) and installed on the returned conflict method.
// On allocation failure, `conflict_method` is returned unchanged.
ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
                                                 ArtMethod* conflict_method,
                                                 ArtMethod* interface_method,
                                                 ArtMethod* method) {
  ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
  Runtime* const runtime = Runtime::Current();

  // The IMT may be shared with a super class, in which case we need to use that
  // super class's `LinearAlloc`. The conflict itself should be limited to
  // methods at or higher up the chain of the IMT owner, otherwise class
  // linker would have created a different IMT.
  ObjPtr<mirror::Class> imt_owner = GetImtOwner(klass);
  DCHECK(imt_owner != nullptr);

  LinearAlloc* linear_alloc = GetAllocatorForClassLoader(imt_owner->GetClassLoader());
  // If the imt owner is in an image, the imt is also there and not in the
  // linear alloc.
  DCHECK_IMPLIES(runtime->GetHeap()->FindSpaceFromObject(imt_owner, /*fail_ok=*/true) == nullptr,
                 linear_alloc->Contains(klass->GetImt(kRuntimePointerSize)));

  // Create a new entry if the existing one is the shared conflict method.
  ArtMethod* new_conflict_method = (conflict_method == runtime->GetImtConflictMethod())
      ? runtime->CreateImtConflictMethod(linear_alloc)
      : conflict_method;

  // Allocate a new table. Note that we will leak this table at the next conflict,
  // but that's a tradeoff compared to making the table fixed size.
  void* data = linear_alloc->Alloc(
      Thread::Current(),
      ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table, image_pointer_size_),
      LinearAllocKind::kNoGCRoots);
  if (data == nullptr) {
    LOG(ERROR) << "Failed to allocate conflict table";
    return conflict_method;
  }
  ImtConflictTable* new_table = new (data) ImtConflictTable(current_table,
                                                            interface_method,
                                                            method,
                                                            image_pointer_size_);

  // Do a fence to ensure threads see the data in the table before it is assigned
  // to the conflict method.
  // Note that there is a race in the presence of multiple threads and we may leak
  // memory from the LinearAlloc, but that's a tradeoff compared to using
  // atomic operations.
  std::atomic_thread_fence(std::memory_order_release);
  new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
  return new_conflict_method;
}
6722 
SetIMTRef(ArtMethod * unimplemented_method,ArtMethod * imt_conflict_method,ArtMethod * current_method,bool * new_conflict,ArtMethod ** imt_ref)6723 void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method,
6724                             ArtMethod* imt_conflict_method,
6725                             ArtMethod* current_method,
6726                             /*out*/bool* new_conflict,
6727                             /*out*/ArtMethod** imt_ref) {
6728   // Place method in imt if entry is empty, place conflict otherwise.
6729   if (*imt_ref == unimplemented_method) {
6730     *imt_ref = current_method;
6731   } else if (!(*imt_ref)->IsRuntimeMethod()) {
6732     // If we are not a conflict and we have the same signature and name as the imt
6733     // entry, it must be that we overwrote a superclass vtable entry.
6734     // Note that we have checked IsRuntimeMethod, as there may be multiple different
6735     // conflict methods.
6736     MethodNameAndSignatureComparator imt_comparator(
6737         (*imt_ref)->GetInterfaceMethodIfProxy(image_pointer_size_));
6738     if (imt_comparator.HasSameNameAndSignature(
6739           current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
6740       *imt_ref = current_method;
6741     } else {
6742       *imt_ref = imt_conflict_method;
6743       *new_conflict = true;
6744     }
6745   } else {
6746     // Place the default conflict method. Note that there may be an existing conflict
6747     // method in the IMT, but it could be one tailored to the super class, with a
6748     // specific ImtConflictTable.
6749     *imt_ref = imt_conflict_method;
6750     *new_conflict = true;
6751   }
6752 }
6753 
// Builds the final IMT for `klass`, creating conflict tables where needed.
// If the computed IMT turns out equivalent to the super class's IMT
// (including equal conflict table contents), the super class's ImTable is
// shared instead of populating a fresh one.
void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
  DCHECK(klass->ShouldHaveImt()) << klass->PrettyClass();
  DCHECK(!klass->IsTemp()) << klass->PrettyClass();
  // Scratch IMT on the stack; every slot starts as the unimplemented marker.
  ArtMethod* imt_data[ImTable::kSize];
  Runtime* const runtime = Runtime::Current();
  ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
  ArtMethod* const conflict_method = runtime->GetImtConflictMethod();
  std::fill_n(imt_data, arraysize(imt_data), unimplemented_method);
  if (klass->GetIfTable() != nullptr) {
    bool new_conflict = false;
    FillIMTFromIfTable(klass->GetIfTable(),
                       unimplemented_method,
                       conflict_method,
                       klass,
                       /*create_conflict_tables=*/true,
                       /*ignore_copied_methods=*/false,
                       &new_conflict,
                       &imt_data[0]);
  }
  // Compare the IMT with the super class including the conflict methods. If they are equivalent,
  // we can just use the same pointer.
  ImTable* imt = nullptr;
  ImTable* super_imt = klass->FindSuperImt(image_pointer_size_);
  if (super_imt != nullptr) {
    bool same = true;
    for (size_t i = 0; same && i < ImTable::kSize; ++i) {
      ArtMethod* method = imt_data[i];
      ArtMethod* super_method = super_imt->Get(i, image_pointer_size_);
      if (method != super_method) {
        // A runtime method other than the shared unimplemented/conflict markers
        // is a per-class conflict method carrying its own ImtConflictTable.
        bool is_conflict_table = method->IsRuntimeMethod() &&
                                 method != unimplemented_method &&
                                 method != conflict_method;
        // Verify conflict contents.
        bool super_conflict_table = super_method->IsRuntimeMethod() &&
                                    super_method != unimplemented_method &&
                                    super_method != conflict_method;
        if (!is_conflict_table || !super_conflict_table) {
          same = false;
        } else {
          // Both slots hold conflict methods: equivalent only if the conflict
          // tables contain the same entries.
          ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_);
          ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_);
          same = same && table1->Equals(table2, image_pointer_size_);
        }
      }
    }
    if (same) {
      imt = super_imt;
    }
  }
  if (imt == nullptr) {
    // Not shareable: populate the table already allocated for this class.
    imt = klass->GetImt(image_pointer_size_);
    DCHECK(imt != nullptr);
    DCHECK_NE(imt, super_imt);
    imt->Populate(imt_data, image_pointer_size_);
  } else {
    // Share the super class's ImTable.
    klass->SetImt(imt, image_pointer_size_);
  }
}
6812 
CreateImtConflictTable(size_t count,LinearAlloc * linear_alloc,PointerSize image_pointer_size)6813 ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
6814                                                       LinearAlloc* linear_alloc,
6815                                                       PointerSize image_pointer_size) {
6816   void* data = linear_alloc->Alloc(Thread::Current(),
6817                                    ImtConflictTable::ComputeSize(count, image_pointer_size),
6818                                    LinearAllocKind::kNoGCRoots);
6819   return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
6820 }
6821 
// Convenience overload using this class linker's own image pointer size.
ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, LinearAlloc* linear_alloc) {
  return CreateImtConflictTable(count, linear_alloc, image_pointer_size_);
}
6825 
FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,ArtMethod * unimplemented_method,ArtMethod * imt_conflict_method,ObjPtr<mirror::Class> klass,bool create_conflict_tables,bool ignore_copied_methods,bool * new_conflict,ArtMethod ** imt)6826 void ClassLinker::FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,
6827                                      ArtMethod* unimplemented_method,
6828                                      ArtMethod* imt_conflict_method,
6829                                      ObjPtr<mirror::Class> klass,
6830                                      bool create_conflict_tables,
6831                                      bool ignore_copied_methods,
6832                                      /*out*/bool* new_conflict,
6833                                      /*out*/ArtMethod** imt) {
6834   uint32_t conflict_counts[ImTable::kSize] = {};
6835   for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
6836     ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
6837     const size_t num_virtuals = interface->NumVirtualMethods();
6838     const size_t method_array_count = if_table->GetMethodArrayCount(i);
6839     // Virtual methods can be larger than the if table methods if there are default methods.
6840     DCHECK_GE(num_virtuals, method_array_count);
6841     if (kIsDebugBuild) {
6842       if (klass->IsInterface()) {
6843         DCHECK_EQ(method_array_count, 0u);
6844       } else {
6845         DCHECK_EQ(interface->NumDeclaredVirtualMethods(), method_array_count);
6846       }
6847     }
6848     if (method_array_count == 0) {
6849       continue;
6850     }
6851     ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
6852     for (size_t j = 0; j < method_array_count; ++j) {
6853       ArtMethod* implementation_method =
6854           method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6855       if (ignore_copied_methods && implementation_method->IsCopied()) {
6856         continue;
6857       }
6858       DCHECK(implementation_method != nullptr);
6859       // Miranda methods cannot be used to implement an interface method, but they are safe to put
6860       // in the IMT since their entrypoint is the interface trampoline. If we put any copied methods
6861       // or interface methods in the IMT here they will not create extra conflicts since we compare
6862       // names and signatures in SetIMTRef.
6863       ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
6864       const uint32_t imt_index = interface_method->GetImtIndex();
6865 
6866       // There is only any conflicts if all of the interface methods for an IMT slot don't have
6867       // the same implementation method, keep track of this to avoid creating a conflict table in
6868       // this case.
6869 
6870       // Conflict table size for each IMT slot.
6871       ++conflict_counts[imt_index];
6872 
6873       SetIMTRef(unimplemented_method,
6874                 imt_conflict_method,
6875                 implementation_method,
6876                 /*out*/new_conflict,
6877                 /*out*/&imt[imt_index]);
6878     }
6879   }
6880 
6881   if (create_conflict_tables) {
6882     // Create the conflict tables.
6883     LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
6884     for (size_t i = 0; i < ImTable::kSize; ++i) {
6885       size_t conflicts = conflict_counts[i];
6886       if (imt[i] == imt_conflict_method) {
6887         ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc);
6888         if (new_table != nullptr) {
6889           ArtMethod* new_conflict_method =
6890               Runtime::Current()->CreateImtConflictMethod(linear_alloc);
6891           new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
6892           imt[i] = new_conflict_method;
6893         } else {
6894           LOG(ERROR) << "Failed to allocate conflict table";
6895           imt[i] = imt_conflict_method;
6896         }
6897       } else {
6898         DCHECK_NE(imt[i], imt_conflict_method);
6899       }
6900     }
6901 
6902     for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
6903       ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
6904       const size_t method_array_count = if_table->GetMethodArrayCount(i);
6905       // Virtual methods can be larger than the if table methods if there are default methods.
6906       if (method_array_count == 0) {
6907         continue;
6908       }
6909       ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
6910       for (size_t j = 0; j < method_array_count; ++j) {
6911         ArtMethod* implementation_method =
6912             method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6913         if (ignore_copied_methods && implementation_method->IsCopied()) {
6914           continue;
6915         }
6916         DCHECK(implementation_method != nullptr);
6917         ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
6918         const uint32_t imt_index = interface_method->GetImtIndex();
6919         if (!imt[imt_index]->IsRuntimeMethod() ||
6920             imt[imt_index] == unimplemented_method ||
6921             imt[imt_index] == imt_conflict_method) {
6922           continue;
6923         }
6924         ImtConflictTable* table = imt[imt_index]->GetImtConflictTable(image_pointer_size_);
6925         const size_t num_entries = table->NumEntries(image_pointer_size_);
6926         table->SetInterfaceMethod(num_entries, image_pointer_size_, interface_method);
6927         table->SetImplementationMethod(num_entries, image_pointer_size_, implementation_method);
6928       }
6929     }
6930   }
6931 }
6932 
6933 namespace {
6934 
6935 // Simple helper function that checks that no subtypes of 'val' are contained within the 'classes'
6936 // set.
NotSubinterfaceOfAny(const ScopedArenaHashSet<mirror::Class * > & classes,ObjPtr<mirror::Class> val)6937 static bool NotSubinterfaceOfAny(
6938     const ScopedArenaHashSet<mirror::Class*>& classes,
6939     ObjPtr<mirror::Class> val)
6940     REQUIRES(Roles::uninterruptible_)
6941     REQUIRES_SHARED(Locks::mutator_lock_) {
6942   DCHECK(val != nullptr);
6943   for (ObjPtr<mirror::Class> c : classes) {
6944     if (val->IsAssignableFrom(c)) {
6945       return false;
6946     }
6947   }
6948   return true;
6949 }
6950 
6951 // We record new interfaces by the index of the direct interface and the index in the
6952 // direct interface's `IfTable`, or `dex::kDexNoIndex` if it's the direct interface itself.
struct NewInterfaceReference {
  // Index into the class's list of directly declared interfaces.
  uint32_t direct_interface_index;
  // Index into that direct interface's IfTable, or `dex::kDexNoIndex` when the
  // reference denotes the direct interface itself.
  uint32_t direct_interface_iftable_index;
};
6957 
// Accessor for the interface list of a proxy class, which is supplied
// explicitly as an ObjectArray of already-resolved interface classes.
// Shares the GetLength()/GetInterface() shape with NonProxyInterfacesAccessor
// so both can be used as the InterfaceAccessor template parameter below.
class ProxyInterfacesAccessor {
 public:
  explicit ProxyInterfacesAccessor(Handle<mirror::ObjectArray<mirror::Class>> interfaces)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : interfaces_(interfaces) {}

  // Number of direct interfaces.
  size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
    return interfaces_->GetLength();
  }

  // Returns the interface at `index`; all entries are already resolved.
  ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(index, GetLength());
    return interfaces_->GetWithoutChecks(index);
  }

 private:
  Handle<mirror::ObjectArray<mirror::Class>> interfaces_;
};
6976 
6977 class NonProxyInterfacesAccessor {
6978  public:
NonProxyInterfacesAccessor(ClassLinker * class_linker,Handle<mirror::Class> klass)6979   NonProxyInterfacesAccessor(ClassLinker* class_linker, Handle<mirror::Class> klass)
6980       REQUIRES_SHARED(Locks::mutator_lock_)
6981       : interfaces_(klass->GetInterfaceTypeList()),
6982         class_linker_(class_linker),
6983         klass_(klass) {
6984     DCHECK(!klass->IsProxyClass());
6985   }
6986 
GetLength()6987   size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
6988     return (interfaces_ != nullptr) ? interfaces_->Size() : 0u;
6989   }
6990 
GetInterface(size_t index)6991   ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
6992     DCHECK_LT(index, GetLength());
6993     dex::TypeIndex type_index = interfaces_->GetTypeItem(index).type_idx_;
6994     return class_linker_->LookupResolvedType(type_index, klass_.Get());
6995   }
6996 
6997  private:
6998   const dex::TypeList* interfaces_;
6999   ClassLinker* class_linker_;
7000   Handle<mirror::Class> klass_;
7001 };
7002 
7003 // Finds new interfaces to add to the interface table in addition to superclass interfaces.
7004 //
7005 // Interfaces in the interface table must satisfy the following constraint:
7006 //     all I, J: Interface | I <: J implies J precedes I
7007 // (note A <: B means that A is a subtype of B). We order this backwards so that we do not need
7008 // to reorder superclass interfaces when new interfaces are added in subclass's interface tables.
7009 //
7010 // This function returns a list of references for all interfaces in the transitive
7011 // closure of the direct interfaces that are not in the superclass interfaces.
7012 // The entries in the list are ordered to satisfy the interface table ordering
7013 // constraint and therefore the interface table formed by appending them to the
7014 // superclass interface table shall also satisfy that constraint.
template <typename InterfaceAccessor>
ALWAYS_INLINE
static ArrayRef<const NewInterfaceReference> FindNewIfTableInterfaces(
    ObjPtr<mirror::IfTable> super_iftable,
    size_t super_ifcount,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces,
    ArrayRef<NewInterfaceReference> initial_storage,
    /*out*/ScopedArenaVector<NewInterfaceReference>* supplemental_storage)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension nts(__FUNCTION__);

  // This is the set of all classes already in the iftable. Used to make checking
  // if a class has already been added quicker.
  constexpr size_t kBufferSize = 32;  // 256 bytes on 64-bit architectures.
  mirror::Class* buffer[kBufferSize];
  ScopedArenaHashSet<mirror::Class*> classes_in_iftable(buffer, kBufferSize, allocator->Adapter());
  // The first super_ifcount elements are from the superclass. We note that they are already added.
  for (size_t i = 0; i < super_ifcount; i++) {
    ObjPtr<mirror::Class> iface = super_iftable->GetInterface(i);
    DCHECK(NotSubinterfaceOfAny(classes_in_iftable, iface)) << "Bad ordering.";
    classes_in_iftable.Put(iface.Ptr());
  }

  // Results are appended to the caller's stack buffer (`initial_storage`)
  // first; if it fills up, the data migrates to the growable
  // `supplemental_storage` vector.
  ArrayRef<NewInterfaceReference> current_storage = initial_storage;
  DCHECK_NE(current_storage.size(), 0u);
  size_t num_new_interfaces = 0u;
  auto insert_reference = [&](uint32_t direct_interface_index,
                              uint32_t direct_interface_iface_index) {
    if (UNLIKELY(num_new_interfaces == current_storage.size())) {
      // Out of space: grow the supplemental vector. A copy is only needed the
      // first time, while the data still lives in `initial_storage`.
      bool copy = current_storage.data() != supplemental_storage->data();
      supplemental_storage->resize(2u * num_new_interfaces);
      if (copy) {
        std::copy_n(current_storage.data(), num_new_interfaces, supplemental_storage->data());
      }
      current_storage = ArrayRef<NewInterfaceReference>(*supplemental_storage);
    }
    current_storage[num_new_interfaces] = {direct_interface_index, direct_interface_iface_index};
    ++num_new_interfaces;
  };

  for (size_t i = 0, num_interfaces = interfaces.GetLength(); i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);

    // Let us call the first filled_ifcount elements of iftable the current-iface-list.
    // At this point in the loop current-iface-list has the invariant that:
    //    for every pair of interfaces I,J within it:
    //      if index_of(I) < index_of(J) then I is not a subtype of J

    // If we have already seen this element then all of its super-interfaces must already be in the
    // current-iface-list so we can skip adding it.
    if (classes_in_iftable.find(interface.Ptr()) == classes_in_iftable.end()) {
      // We haven't seen this interface so add all of its super-interfaces onto the
      // current-iface-list, skipping those already on it.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        if (classes_in_iftable.find(super_interface.Ptr()) == classes_in_iftable.end()) {
          DCHECK(NotSubinterfaceOfAny(classes_in_iftable, super_interface)) << "Bad ordering.";
          classes_in_iftable.Put(super_interface.Ptr());
          insert_reference(i, j);
        }
      }
      // Add this interface reference after all of its super-interfaces.
      DCHECK(NotSubinterfaceOfAny(classes_in_iftable, interface)) << "Bad ordering";
      classes_in_iftable.Put(interface.Ptr());
      insert_reference(i, dex::kDexNoIndex);
    } else if (kIsDebugBuild) {
      // Check all super-interfaces are already in the list.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        DCHECK(classes_in_iftable.find(super_interface.Ptr()) != classes_in_iftable.end())
            << "Iftable does not contain " << mirror::Class::PrettyClass(super_interface)
            << ", a superinterface of " << interface->PrettyClass();
      }
    }
  }
  return ArrayRef<const NewInterfaceReference>(current_storage.data(), num_new_interfaces);
}
7095 
// Builds the IfTable for `klass` from its superclass's table plus its newly
// declared interfaces. Returns the superclass's table unchanged when no new
// interfaces are added (copy-on-write sharing), or null with a pending
// exception (IncompatibleClassChangeError or OOME) on failure.
template <typename InterfaceAccessor>
static ObjPtr<mirror::IfTable> SetupInterfaceLookupTable(
    Thread* self,
    Handle<mirror::Class> klass,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(klass->HasSuperClass());
  ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
  DCHECK(super_iftable != nullptr);
  const size_t num_interfaces = interfaces.GetLength();

  // If there are no new interfaces, return the interface table from superclass.
  // If any implementation methods are overridden, we shall copy the table and
  // the method arrays that contain any differences (copy-on-write).
  if (num_interfaces == 0) {
    return super_iftable;
  }

  // Check that every class being implemented is an interface.
  for (size_t i = 0; i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);
    DCHECK(interface != nullptr);
    if (UNLIKELY(!interface->IsInterface())) {
      ThrowIncompatibleClassChangeError(klass.Get(),
                                        "Class %s implements non-interface class %s",
                                        klass->PrettyDescriptor().c_str(),
                                        interface->PrettyDescriptor().c_str());
      return nullptr;
    }
  }

  // Find the transitive closure of newly declared interfaces that are not
  // already covered by the superclass's table, ordered supertypes-first.
  static constexpr size_t kMaxStackReferences = 16;
  NewInterfaceReference initial_storage[kMaxStackReferences];
  ScopedArenaVector<NewInterfaceReference> supplemental_storage(allocator->Adapter());
  const size_t super_ifcount = super_iftable->Count();
  ArrayRef<const NewInterfaceReference> new_interface_references =
      FindNewIfTableInterfaces(
          super_iftable,
          super_ifcount,
          allocator,
          interfaces,
          ArrayRef<NewInterfaceReference>(initial_storage),
          &supplemental_storage);

  // If all declared interfaces were already present in superclass interface table,
  // return the interface table from superclass. See above.
  if (UNLIKELY(new_interface_references.empty())) {
    return super_iftable;
  }

  // Create the interface table.
  size_t ifcount = super_ifcount + new_interface_references.size();
  ObjPtr<mirror::IfTable> iftable = AllocIfTable(self, ifcount, super_iftable->GetClass());
  if (UNLIKELY(iftable == nullptr)) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  // Fill in table with superclass's iftable.
  if (super_ifcount != 0) {
    // Reload `super_iftable` as it may have been clobbered by the allocation.
    super_iftable = klass->GetSuperClass()->GetIfTable();
    for (size_t i = 0; i != super_ifcount; i++) {
      ObjPtr<mirror::Class> super_interface = super_iftable->GetInterface(i);
      DCHECK(super_interface != nullptr);
      iftable->SetInterface(i, super_interface);
      ObjPtr<mirror::PointerArray> method_array = super_iftable->GetMethodArrayOrNull(i);
      if (method_array != nullptr) {
        iftable->SetMethodArray(i, method_array);
      }
    }
  }
  // Fill in the table with additional interfaces.
  size_t current_index = super_ifcount;
  for (NewInterfaceReference ref : new_interface_references) {
    // Resolve each reference back to a class: either the direct interface
    // itself or an entry from its IfTable (see NewInterfaceReference).
    ObjPtr<mirror::Class> direct_interface = interfaces.GetInterface(ref.direct_interface_index);
    ObjPtr<mirror::Class> new_interface = (ref.direct_interface_iftable_index != dex::kDexNoIndex)
        ? direct_interface->GetIfTable()->GetInterface(ref.direct_interface_iftable_index)
        : direct_interface;
    iftable->SetInterface(current_index, new_interface);
    ++current_index;
  }
  DCHECK_EQ(current_index, ifcount);

  if (kIsDebugBuild) {
    // Check that the iftable is ordered correctly.
    for (size_t i = 0; i < ifcount; i++) {
      ObjPtr<mirror::Class> if_a = iftable->GetInterface(i);
      for (size_t j = i + 1; j < ifcount; j++) {
        ObjPtr<mirror::Class> if_b = iftable->GetInterface(j);
        // !(if_a <: if_b)
        CHECK(!if_b->IsAssignableFrom(if_a))
            << "Bad interface order: " << mirror::Class::PrettyClass(if_a) << " (index " << i
            << ") extends "
            << if_b->PrettyClass() << " (index " << j << ") and so should be after it in the "
            << "interface list.";
      }
    }
  }

  return iftable;
}
7198 
7199 // Check that all vtable entries are present in this class's virtuals or are the same as a
7200 // superclasses vtable entry.
CheckClassOwnsVTableEntries(Thread * self,Handle<mirror::Class> klass,PointerSize pointer_size)7201 void CheckClassOwnsVTableEntries(Thread* self,
7202                                  Handle<mirror::Class> klass,
7203                                  PointerSize pointer_size)
7204     REQUIRES_SHARED(Locks::mutator_lock_) {
7205   StackHandleScope<2> hs(self);
7206   Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7207   ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
7208   Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
7209   int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
7210   for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
7211     ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
7212     CHECK(m != nullptr);
7213 
7214     if (m->GetMethodIndexDuringLinking() != i) {
7215       LOG(WARNING) << m->PrettyMethod()
7216                    << " has an unexpected method index for its spot in the vtable for class"
7217                    << klass->PrettyClass();
7218     }
7219     ArraySlice<ArtMethod> virtuals = klass->GetVirtualMethodsSliceUnchecked(pointer_size);
7220     auto is_same_method = [m] (const ArtMethod& meth) {
7221       return &meth == m;
7222     };
7223     if (!((super_vtable_length > i && superclass->GetVTableEntry(i, pointer_size) == m) ||
7224           std::find_if(virtuals.begin(), virtuals.end(), is_same_method) != virtuals.end())) {
7225       LOG(WARNING) << m->PrettyMethod() << " does not seem to be owned by current class "
7226                    << klass->PrettyClass() << " or any of its superclasses!";
7227     }
7228   }
7229 }
7230 
7231 // Check to make sure the vtable does not have duplicates. Duplicates could cause problems when a
7232 // method is overridden in a subclass.
7233 template <PointerSize kPointerSize>
CheckVTableHasNoDuplicates(Thread * self,Handle<mirror::Class> klass)7234 void CheckVTableHasNoDuplicates(Thread* self, Handle<mirror::Class> klass)
7235     REQUIRES_SHARED(Locks::mutator_lock_) {
7236   StackHandleScope<1> hs(self);
7237   Handle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7238   int32_t num_entries = vtable->GetLength();
7239 
7240   // Observations:
7241   //   * The older implementation was O(n^2) and got too expensive for apps with larger classes.
7242   //   * Many classes do not override Object functions (e.g., equals/hashCode/toString). Thus,
7243   //     for many classes outside of libcore a cross-dexfile check has to be run anyways.
7244   //   * In the cross-dexfile case, with the O(n^2), in the best case O(n) cross checks would have
7245   //     to be done. It is thus OK in a single-pass algorithm to read all data, anyways.
7246   //   * The single-pass algorithm will trade memory for speed, but that is OK.
7247 
7248   CHECK_GT(num_entries, 0);
7249 
7250   auto log_fn = [&vtable, &klass](int32_t i, int32_t j) REQUIRES_SHARED(Locks::mutator_lock_) {
7251     ArtMethod* m1 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(i);
7252     ArtMethod* m2 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
7253     LOG(WARNING) << "vtable entries " << i << " and " << j << " are identical for "
7254                  << klass->PrettyClass() << " in method " << m1->PrettyMethod()
7255                 << " (0x" << std::hex << reinterpret_cast<uintptr_t>(m2) << ") and "
7256                 << m2->PrettyMethod() << "  (0x" << std::hex
7257                 << reinterpret_cast<uintptr_t>(m2) << ")";
7258   };
7259   struct BaseHashType {
7260     static size_t HashCombine(size_t seed, size_t val) {
7261       return seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2));
7262     }
7263   };
7264 
7265   // Check assuming all entries come from the same dex file.
7266   {
7267     // Find the first interesting method and its dex file.
7268     int32_t start = 0;
7269     for (; start < num_entries; ++start) {
7270       ArtMethod* vtable_entry = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start);
7271       // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
7272       // maybe).
7273       if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7274                                   vtable_entry->GetAccessFlags())) {
7275         continue;
7276       }
7277       break;
7278     }
7279     if (start == num_entries) {
7280       return;
7281     }
7282     const DexFile* dex_file =
7283         vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start)->
7284             GetInterfaceMethodIfProxy(kPointerSize)->GetDexFile();
7285 
7286     // Helper function to avoid logging if we have to run the cross-file checks.
7287     auto check_fn = [&](bool log_warn) REQUIRES_SHARED(Locks::mutator_lock_) {
7288       // Use a map to store seen entries, as the storage space is too large for a bitvector.
7289       using PairType = std::pair<uint32_t, uint16_t>;
7290       struct PairHash : BaseHashType {
7291         size_t operator()(const PairType& key) const {
7292           return BaseHashType::HashCombine(BaseHashType::HashCombine(0, key.first), key.second);
7293         }
7294       };
7295       HashMap<PairType, int32_t, DefaultMapEmptyFn<PairType, int32_t>, PairHash> seen;
7296       seen.reserve(2 * num_entries);
7297       bool need_slow_path = false;
7298       bool found_dup = false;
7299       for (int i = start; i < num_entries; ++i) {
7300         // Can use Unchecked here as the start loop already ensured that the arrays are correct
7301         // wrt/ kPointerSize.
7302         ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7303         if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7304                                     vtable_entry->GetAccessFlags())) {
7305           continue;
7306         }
7307         ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7308         if (dex_file != m->GetDexFile()) {
7309           need_slow_path = true;
7310           break;
7311         }
7312         const dex::MethodId* m_mid = &dex_file->GetMethodId(m->GetDexMethodIndex());
7313         PairType pair = std::make_pair(m_mid->name_idx_.index_, m_mid->proto_idx_.index_);
7314         auto it = seen.find(pair);
7315         if (it != seen.end()) {
7316           found_dup = true;
7317           if (log_warn) {
7318             log_fn(it->second, i);
7319           }
7320         } else {
7321           seen.insert(std::make_pair(pair, i));
7322         }
7323       }
7324       return std::make_pair(need_slow_path, found_dup);
7325     };
7326     std::pair<bool, bool> result = check_fn(/* log_warn= */ false);
7327     if (!result.first) {
7328       if (result.second) {
7329         check_fn(/* log_warn= */ true);
7330       }
7331       return;
7332     }
7333   }
7334 
7335   // Need to check across dex files.
7336   struct Entry {
7337     size_t cached_hash = 0;
7338     uint32_t name_len = 0;
7339     const char* name = nullptr;
7340     Signature signature = Signature::NoSignature();
7341 
7342     Entry() = default;
7343     Entry(const Entry& other) = default;
7344     Entry& operator=(const Entry& other) = default;
7345 
7346     Entry(const DexFile* dex_file, const dex::MethodId& mid)
7347         : name_len(0),  // Explicit to enforce ordering with -Werror,-Wreorder-ctor.
7348           // This call writes `name_len` and it is therefore necessary that the
7349           // initializer for `name_len` comes before it, otherwise the value
7350           // from the call would be overwritten by that initializer.
7351           name(dex_file->GetStringDataAndUtf16Length(mid.name_idx_, &name_len)),
7352           signature(dex_file->GetMethodSignature(mid)) {
7353       // The `name_len` has been initialized to the UTF16 length. Calculate length in bytes.
7354       if (name[name_len] != 0) {
7355         name_len += strlen(name + name_len);
7356       }
7357     }
7358 
7359     bool operator==(const Entry& other) const {
7360       return name_len == other.name_len &&
7361              memcmp(name, other.name, name_len) == 0 &&
7362              signature == other.signature;
7363     }
7364   };
7365   struct EntryHash {
7366     size_t operator()(const Entry& key) const {
7367       return key.cached_hash;
7368     }
7369   };
7370   HashMap<Entry, int32_t, DefaultMapEmptyFn<Entry, int32_t>, EntryHash> map;
7371   for (int32_t i = 0; i < num_entries; ++i) {
7372     // Can use Unchecked here as the first loop already ensured that the arrays are correct
7373     // wrt/ kPointerSize.
7374     ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7375     // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
7376     // maybe).
7377     if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7378                                 vtable_entry->GetAccessFlags())) {
7379       continue;
7380     }
7381     ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7382     const DexFile* dex_file = m->GetDexFile();
7383     const dex::MethodId& mid = dex_file->GetMethodId(m->GetDexMethodIndex());
7384 
7385     Entry e(dex_file, mid);
7386 
7387     size_t string_hash = std::hash<std::string_view>()(std::string_view(e.name, e.name_len));
7388     size_t sig_hash = std::hash<std::string>()(e.signature.ToString());
7389     e.cached_hash = BaseHashType::HashCombine(BaseHashType::HashCombine(0u, string_hash),
7390                                               sig_hash);
7391 
7392     auto it = map.find(e);
7393     if (it != map.end()) {
7394       log_fn(it->second, i);
7395     } else {
7396       map.insert(std::make_pair(e, i));
7397     }
7398   }
7399 }
7400 
CheckVTableHasNoDuplicates(Thread * self,Handle<mirror::Class> klass,PointerSize pointer_size)7401 void CheckVTableHasNoDuplicates(Thread* self,
7402                                 Handle<mirror::Class> klass,
7403                                 PointerSize pointer_size)
7404     REQUIRES_SHARED(Locks::mutator_lock_) {
7405   switch (pointer_size) {
7406     case PointerSize::k64:
7407       CheckVTableHasNoDuplicates<PointerSize::k64>(self, klass);
7408       break;
7409     case PointerSize::k32:
7410       CheckVTableHasNoDuplicates<PointerSize::k32>(self, klass);
7411       break;
7412   }
7413 }
7414 
// Run the vtable consistency checks for a freshly linked `klass`:
// ownership of vtable entries, then duplicate detection.
static void CheckVTable(Thread* self, Handle<mirror::Class> klass, PointerSize pointer_size)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  CheckClassOwnsVTableEntries(self, klass, pointer_size);
  CheckVTableHasNoDuplicates(self, klass, pointer_size);
}
7420 
7421 }  // namespace
7422 
7423 template <PointerSize kPointerSize>
7424 class ClassLinker::LinkMethodsHelper {
7425  public:
  // Construct a helper for linking the methods of `klass`. Sets up an arena
  // stack/allocator backed by the runtime's arena pool; `copied_method_records_`
  // starts out in the inline initial buffer and spills into the arena if it
  // grows past `kCopiedMethodRecordInitialBufferSize`.
  LinkMethodsHelper(ClassLinker* class_linker,
                    Handle<mirror::Class> klass,
                    Thread* self,
                    Runtime* runtime)
      : class_linker_(class_linker),
        klass_(klass),
        self_(self),
        runtime_(runtime),
        stack_(runtime->GetArenaPool()),
        allocator_(&stack_),
        copied_method_records_(copied_method_records_initial_buffer_,
                               kCopiedMethodRecordInitialBufferSize,
                               allocator_.Adapter()),
        num_new_copied_methods_(0u) {
  }
7441 
7442   // Links the virtual and interface methods for the given class.
7443   //
7444   // Arguments:
7445   // * self - The current thread.
7446   // * klass - class, whose vtable will be filled in.
7447   // * interfaces - implemented interfaces for a proxy class, otherwise null.
7448   // * out_new_conflict - whether there is a new conflict compared to the superclass.
7449   // * out_imt - interface method table to fill.
7450   bool LinkMethods(
7451       Thread* self,
7452       Handle<mirror::Class> klass,
7453       Handle<mirror::ObjectArray<mirror::Class>> interfaces,
7454       bool* out_new_conflict,
7455       ArtMethod** out_imt)
7456       REQUIRES_SHARED(Locks::mutator_lock_);
7457 
7458  private:
7459   // Allocate a pointer array.
7460   static ObjPtr<mirror::PointerArray> AllocPointerArray(Thread* self, size_t length)
7461       REQUIRES_SHARED(Locks::mutator_lock_);
7462 
7463   // Allocate method arrays for interfaces.
7464   bool AllocateIfTableMethodArrays(Thread* self,
7465                                    Handle<mirror::Class> klass,
7466                                    Handle<mirror::IfTable> iftable)
7467       REQUIRES_SHARED(Locks::mutator_lock_);
7468 
7469   // Assign vtable indexes to declared virtual methods for a non-interface class other
7470   // than `java.lang.Object`. Returns the number of vtable entries on success, 0 on failure.
7471   // This function also assigns vtable indexes for interface methods in new interfaces
7472   // and records data for copied methods which shall be referenced by the vtable.
7473   size_t AssignVTableIndexes(ObjPtr<mirror::Class> klass,
7474                              ObjPtr<mirror::Class> super_class,
7475                              bool is_super_abstract,
7476                              size_t num_virtual_methods,
7477                              ObjPtr<mirror::IfTable> iftable)
7478       REQUIRES_SHARED(Locks::mutator_lock_);
7479 
7480   bool FindCopiedMethodsForInterface(ObjPtr<mirror::Class> klass,
7481                                      size_t num_virtual_methods,
7482                                      ObjPtr<mirror::IfTable> iftable)
7483       REQUIRES_SHARED(Locks::mutator_lock_);
7484 
7485   bool LinkJavaLangObjectMethods(Thread* self, Handle<mirror::Class> klass)
7486       REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;
7487 
7488   void ReallocMethods(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
7489   bool FinalizeIfTable(Handle<mirror::Class> klass,
7490                        MutableHandle<mirror::IfTable> iftable,
7491                        Handle<mirror::PointerArray> vtable,
7492                        bool is_klass_abstract,
7493                        bool is_super_abstract,
7494                        bool* out_new_conflict,
7495                        ArtMethod** out_imt)
7496       REQUIRES_SHARED(Locks::mutator_lock_);
7497 
  // In debug builds, fill the old (replaced) method array with 0xFE bytes so
  // that any stale pointer into it is caught quickly. No-op in release builds,
  // when there was no old array, or when the array was reused in place.
  void ClobberOldMethods(LengthPrefixedArray<ArtMethod>* old_methods,
                         LengthPrefixedArray<ArtMethod>* methods) {
    if (kIsDebugBuild && old_methods != nullptr) {
      CHECK(methods != nullptr);
      // Put some random garbage in old methods to help find stale pointers.
      if (methods != old_methods) {
        // Need to make sure the GC is not running since it could be scanning the methods we are
        // about to overwrite.
        ScopedThreadStateChange tsc(self_, ThreadState::kSuspended);
        gc::ScopedGCCriticalSection gcs(self_,
                                        gc::kGcCauseClassLinker,
                                        gc::kCollectorTypeClassLinker);
        const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_methods->size(),
                                                                            kMethodSize,
                                                                            kMethodAlignment);
        memset(old_methods, 0xFEu, old_size);
        // Set size to 0 to avoid visiting declaring classes.
        if (gUseUserfaultfd) {
          old_methods->SetSize(0);
        }
      }
    }
  }
7521 
7522   NO_INLINE
LogNewVirtuals(LengthPrefixedArray<ArtMethod> * methods) const7523   void LogNewVirtuals(LengthPrefixedArray<ArtMethod>* methods) const
7524       REQUIRES_SHARED(Locks::mutator_lock_) {
7525     ObjPtr<mirror::Class> klass = klass_.Get();
7526     size_t num_new_copied_methods = num_new_copied_methods_;
7527     size_t old_method_count = methods->size() - num_new_copied_methods;
7528     size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
7529     size_t num_miranda_methods = 0u;
7530     size_t num_overriding_default_methods = 0u;
7531     size_t num_default_methods = 0u;
7532     size_t num_overriding_default_conflict_methods = 0u;
7533     size_t num_default_conflict_methods = 0u;
7534     for (size_t i = 0; i != num_new_copied_methods; ++i) {
7535       ArtMethod& m = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
7536       if (m.IsDefault()) {
7537         if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7538           ++num_overriding_default_methods;
7539         } else {
7540           ++num_default_methods;
7541         }
7542       } else if (m.IsDefaultConflicting()) {
7543         if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7544           ++num_overriding_default_conflict_methods;
7545         } else {
7546           ++num_default_conflict_methods;
7547         }
7548       } else {
7549         DCHECK(m.IsMiranda());
7550         ++num_miranda_methods;
7551       }
7552     }
7553     VLOG(class_linker) << klass->PrettyClass() << ": miranda_methods=" << num_miranda_methods
7554                        << " default_methods=" << num_default_methods
7555                        << " overriding_default_methods=" << num_overriding_default_methods
7556                        << " default_conflict_methods=" << num_default_conflict_methods
7557                        << " overriding_default_conflict_methods="
7558                        << num_overriding_default_conflict_methods;
7559   }
7560 
  // Empty-slot policy for hash containers holding method indexes:
  // `dex::kDexNoIndex` marks an unused slot.
  class MethodIndexEmptyFn {
   public:
    void MakeEmpty(uint32_t& item) const {
      item = dex::kDexNoIndex;
    }
    bool IsEmpty(const uint32_t& item) const {
      return item == dex::kDexNoIndex;
    }
  };
7570 
  // Debug-build bounds checker for vtable indexes: `CheckIndex()` aborts on an
  // out-of-range value.
  class VTableIndexCheckerDebug {
   protected:
    explicit VTableIndexCheckerDebug(size_t vtable_length)
        : vtable_length_(vtable_length) {}

    void CheckIndex(uint32_t index) const {
      CHECK_LT(index, vtable_length_);
    }

   private:
    uint32_t vtable_length_;
  };
7583 
  // Release-build counterpart of `VTableIndexCheckerDebug`: the check compiles
  // to nothing.
  class VTableIndexCheckerRelease {
   protected:
    explicit VTableIndexCheckerRelease([[maybe_unused]] size_t vtable_length) {}
    void CheckIndex([[maybe_unused]] uint32_t index) const {}
  };
7589 
7590   using VTableIndexChecker =
7591       std::conditional_t<kIsDebugBuild, VTableIndexCheckerDebug, VTableIndexCheckerRelease>;
7592 
7593   class VTableAccessor : private VTableIndexChecker {
7594    public:
VTableAccessor(uint8_t * raw_vtable,size_t vtable_length)7595     VTableAccessor(uint8_t* raw_vtable, size_t vtable_length)
7596         REQUIRES_SHARED(Locks::mutator_lock_)
7597         : VTableIndexChecker(vtable_length),
7598           raw_vtable_(raw_vtable) {}
7599 
GetVTableEntry(uint32_t index) const7600     ArtMethod* GetVTableEntry(uint32_t index) const REQUIRES_SHARED(Locks::mutator_lock_) {
7601       this->CheckIndex(index);
7602       uint8_t* entry = raw_vtable_ + static_cast<size_t>(kPointerSize) * index;
7603       if (kPointerSize == PointerSize::k64) {
7604         return reinterpret_cast64<ArtMethod*>(*reinterpret_cast<uint64_t*>(entry));
7605       } else {
7606         return reinterpret_cast32<ArtMethod*>(*reinterpret_cast<uint32_t*>(entry));
7607       }
7608     }
7609 
7610    private:
7611     uint8_t* raw_vtable_;
7612   };
7613 
  // Hash functor for `VTableSignatureSet`: hashes either an `ArtMethod*`
  // directly or a vtable index (resolved through `accessor_`) via
  // `ComputeMethodHash()`, so lookups by method and by index agree.
  class VTableSignatureHash {
   public:
    explicit VTableSignatureHash(VTableAccessor accessor)
        REQUIRES_SHARED(Locks::mutator_lock_)
        : accessor_(accessor) {}

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
      return ComputeMethodHash(method);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
      return ComputeMethodHash(accessor_.GetVTableEntry(index));
    }

   private:
    VTableAccessor accessor_;
  };
7633 
  // Equality functor for `VTableSignatureSet`: compares a stored vtable index
  // against either an `ArtMethod*` or another index using
  // `MethodSignatureEquals()`.
  class VTableSignatureEqual {
   public:
    explicit VTableSignatureEqual(VTableAccessor accessor)
        REQUIRES_SHARED(Locks::mutator_lock_)
        : accessor_(accessor) {}

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
      return MethodSignatureEquals(accessor_.GetVTableEntry(lhs_index), rhs);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
      // Index/index comparison delegates to the index/method overload.
      return (*this)(lhs_index, accessor_.GetVTableEntry(rhs_index));
    }

   private:
    VTableAccessor accessor_;
  };
7653 
7654   using VTableSignatureSet =
7655       ScopedArenaHashSet<uint32_t, MethodIndexEmptyFn, VTableSignatureHash, VTableSignatureEqual>;
7656 
  // Hash functor for `DeclaredVirtualSignatureSet`: hashes either an
  // `ArtMethod*` or an index into the declared virtual methods of `klass_`
  // via `ComputeMethodHash()`.
  class DeclaredVirtualSignatureHash {
   public:
    explicit DeclaredVirtualSignatureHash(ObjPtr<mirror::Class> klass)
        REQUIRES_SHARED(Locks::mutator_lock_)
        : klass_(klass) {}

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
      return ComputeMethodHash(method);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
      DCHECK_LT(index, klass_->NumDeclaredVirtualMethods());
      ArtMethod* method = klass_->GetVirtualMethodDuringLinking(index, kPointerSize);
      // For proxy classes, hash the interface method it forwards to.
      return ComputeMethodHash(method->GetInterfaceMethodIfProxy(kPointerSize));
    }

   private:
    ObjPtr<mirror::Class> klass_;
  };
7678 
  // Equality functor for `DeclaredVirtualSignatureSet`: compares an index into
  // the declared virtual methods of `klass_` against an `ArtMethod*` or
  // another such index.
  class DeclaredVirtualSignatureEqual {
   public:
    explicit DeclaredVirtualSignatureEqual(ObjPtr<mirror::Class> klass)
        REQUIRES_SHARED(Locks::mutator_lock_)
        : klass_(klass) {}

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
      DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
      ArtMethod* lhs = klass_->GetVirtualMethodDuringLinking(lhs_index, kPointerSize);
      return MethodSignatureEquals(lhs->GetInterfaceMethodIfProxy(kPointerSize), rhs);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
      DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
      DCHECK_LT(rhs_index, klass_->NumDeclaredVirtualMethods());
      // Indexes into the same class's declared methods are equal only when
      // they are the same index, so no signature comparison is needed here.
      return lhs_index == rhs_index;
    }

   private:
    ObjPtr<mirror::Class> klass_;
  };
7702 
7703   using DeclaredVirtualSignatureSet = ScopedArenaHashSet<uint32_t,
7704                                                          MethodIndexEmptyFn,
7705                                                          DeclaredVirtualSignatureHash,
7706                                                          DeclaredVirtualSignatureEqual>;
7707 
7708   // Helper class to keep records for determining the correct copied method to create.
7709   class CopiedMethodRecord {
7710    public:
7711     enum class State : uint32_t {
7712       // Note: The `*Single` values are used when we know that there is only one interface
7713       // method with the given signature that's not masked; that method is the main method.
7714       // We use this knowledge for faster masking check, otherwise we need to search for
7715       // a masking method through methods of all interfaces that could potentially mask it.
7716       kAbstractSingle,
7717       kDefaultSingle,
7718       kAbstract,
7719       kDefault,
7720       kDefaultConflict,
7721       kUseSuperMethod,
7722     };
7723 
    // Empty record: no main method yet, initial `kAbstractSingle` state.
    CopiedMethodRecord()
        : main_method_(nullptr),
          method_index_(0u),
          state_(State::kAbstractSingle) {}
7728 
    // Record for `main_method` assigned to `vtable_index`, starting in the
    // initial `kAbstractSingle` state.
    CopiedMethodRecord(ArtMethod* main_method, size_t vtable_index)
        : main_method_(main_method),
          method_index_(vtable_index),
          state_(State::kAbstractSingle) {}
7733 
7734     // Set main method. The new main method must be more specific implementation.
SetMainMethod(ArtMethod * main_method)7735     void SetMainMethod(ArtMethod* main_method) {
7736       DCHECK(main_method_ != nullptr);
7737       main_method_ = main_method;
7738     }
7739 
7740     // The main method is the first encountered default method if any,
7741     // otherwise the first encountered abstract method.
GetMainMethod() const7742     ArtMethod* GetMainMethod() const {
7743       return main_method_;
7744     }
7745 
SetMethodIndex(size_t method_index)7746     void SetMethodIndex(size_t method_index) {
7747       DCHECK_NE(method_index, dex::kDexNoIndex);
7748       method_index_ = method_index;
7749     }
7750 
GetMethodIndex() const7751     size_t GetMethodIndex() const {
7752       DCHECK_NE(method_index_, dex::kDexNoIndex);
7753       return method_index_;
7754     }
7755 
SetState(State state)7756     void SetState(State state) {
7757       state_ = state;
7758     }
7759 
GetState() const7760     State GetState() const {
7761       return state_;
7762     }
7763 
    ALWAYS_INLINE
    // Update this record's state for a default `interface_method` declared by
    // `iface` (the interface at `index` in `iftable`). Requires the record to
    // already be in state `kDefault` or `kDefaultConflict`; promotes it to
    // `kDefaultConflict` if the new default method is not masked by a more
    // specific interface later in the iftable.
    void UpdateStateForInterface(ObjPtr<mirror::Class> iface,
                                 ArtMethod* interface_method,
                                 ObjPtr<mirror::IfTable> iftable,
                                 size_t ifcount,
                                 size_t index)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK_EQ(ifcount, iftable->Count());
      DCHECK_LT(index, ifcount);
      DCHECK(iface == interface_method->GetDeclaringClass());
      DCHECK(iface == iftable->GetInterface(index));
      DCHECK(interface_method->IsDefault());
      if (GetState() != State::kDefaultConflict) {
        DCHECK(GetState() == State::kDefault);
        // We do not record all overriding methods, so we need to walk over all
        // interfaces that could mask the `interface_method`.
        if (ContainsOverridingMethodOf(iftable, index + 1, ifcount, iface, interface_method)) {
          return;  // Found an overriding method that masks `interface_method`.
        }
        // We have a new default method that's not masked by any other method.
        SetState(State::kDefaultConflict);
      }
    }
7787 
    ALWAYS_INLINE
    // Update this record's state for `interface_method` from `iface` at
    // `index` in `iftable`. `vtable_index` must match this record's method
    // index. Delegates the state transition to `UpdateStateImpl()`, supplying
    // a slow-path predicate that scans the remaining interfaces for a masking
    // implementation.
    void UpdateState(ObjPtr<mirror::Class> iface,
                     ArtMethod* interface_method,
                     size_t vtable_index,
                     ObjPtr<mirror::IfTable> iftable,
                     size_t ifcount,
                     size_t index)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK_EQ(ifcount, iftable->Count());
      DCHECK_LT(index, ifcount);
      if (kIsDebugBuild) {
        if (interface_method->IsCopied()) {
          // Called from `FinalizeState()` for a default method from superclass.
          // The `index` points to the last interface inherited from the superclass
          // as we need to search only the new interfaces for masking methods.
          DCHECK(interface_method->IsDefault());
        } else {
          DCHECK(iface == interface_method->GetDeclaringClass());
          DCHECK(iface == iftable->GetInterface(index));
        }
      }
      DCHECK_EQ(vtable_index, method_index_);
      auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
        return ContainsImplementingMethod(iftable, index + 1, ifcount, iface, vtable_index);
      };
      UpdateStateImpl(iface, interface_method, slow_is_masked);
    }
7815 
    ALWAYS_INLINE
    // Finalize this record's state against the copied `super_method` inherited
    // from the superclass at the same `vtable_index`. Determines whether the
    // superclass copied method can be reused (state `kUseSuperMethod`) or a
    // new copied method must be created for this class.
    void FinalizeState(ArtMethod* super_method,
                       size_t vtable_index,
                       ObjPtr<mirror::IfTable> iftable,
                       size_t ifcount,
                       ObjPtr<mirror::IfTable> super_iftable,
                       size_t super_ifcount)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK(super_method->IsCopied());
      DCHECK_EQ(vtable_index, method_index_);
      DCHECK_EQ(vtable_index, super_method->GetMethodIndex());
      DCHECK_NE(super_ifcount, 0u);
      if (super_method->IsDefault()) {
        if (UNLIKELY(super_method->IsDefaultConflicting())) {
          // Some of the default methods that contributed to the conflict in the superclass
          // may be masked by new interfaces. Walk over all the interfaces and update state
          // as long as the current state is not `kDefaultConflict`.
          size_t i = super_ifcount;
          while (GetState() != State::kDefaultConflict && i != 0u) {
            --i;
            ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
            DCHECK(iface == super_iftable->GetInterface(i));
            auto [found, index] =
                MethodArrayContains(super_iftable->GetMethodArrayOrNull(i), super_method);
            if (found) {
              ArtMethod* interface_method = iface->GetVirtualMethod(index, kPointerSize);
              auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
                // Note: The `iftable` has method arrays in range [super_ifcount, ifcount) filled
                // with vtable indexes but the range [0, super_ifcount) is empty, so we need to
                // use the `super_iftable` filled with implementation methods for that range.
                return ContainsImplementingMethod(
                           super_iftable, i + 1u, super_ifcount, iface, super_method) ||
                       ContainsImplementingMethod(
                           iftable, super_ifcount, ifcount, iface, vtable_index);
              };
              UpdateStateImpl(iface, interface_method, slow_is_masked);
            }
          }
          if (GetState() == State::kDefaultConflict) {
            SetState(State::kUseSuperMethod);
          }
        } else {
          // There was exactly one default method in superclass interfaces that was
          // not masked by subinterfaces. Use `UpdateState()` to process it and pass
          // `super_ifcount - 1` as index for checking if it's been masked by new interfaces.
          ObjPtr<mirror::Class> iface = super_method->GetDeclaringClass();
          UpdateState(
              iface, super_method, vtable_index, iftable, ifcount, /*index=*/ super_ifcount - 1u);
          if (GetMainMethod() == super_method) {
            DCHECK(GetState() == State::kDefault) << enum_cast<uint32_t>(GetState());
            SetState(State::kUseSuperMethod);
          }
        }
      } else {
        DCHECK(super_method->IsMiranda());
        // Any default methods with this signature in superclass interfaces have been
        // masked by subinterfaces. Check if we can reuse the miranda method.
        if (GetState() == State::kAbstractSingle || GetState() == State::kAbstract) {
          SetState(State::kUseSuperMethod);
        }
      }
    }
7878 
7879    private:
    // Shared state-transition logic for `UpdateState()` and `FinalizeState()`.
    // `slow_is_masked()` is only invoked when the masking question cannot be
    // answered cheaply from the main method alone (the non-`*Single` states).
    template <typename Predicate>
    ALWAYS_INLINE
    void UpdateStateImpl(ObjPtr<mirror::Class> iface,
                         ArtMethod* interface_method,
                         Predicate&& slow_is_masked)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      bool have_default = false;
      switch (GetState()) {
        case State::kDefaultSingle:
          have_default = true;
          FALLTHROUGH_INTENDED;
        case State::kAbstractSingle:
          // In the `*Single` states the main method is the only unmasked
          // candidate, so a subtype check against its declaring class suffices.
          if (GetMainMethod()->GetDeclaringClass()->Implements(iface)) {
            return;  // The main method masks the `interface_method`.
          }
          if (!interface_method->IsDefault()) {
            SetState(have_default ? State::kDefault : State::kAbstract);
            return;
          }
          break;
        case State::kDefault:
          have_default = true;
          FALLTHROUGH_INTENDED;
        case State::kAbstract:
          if (!interface_method->IsDefault()) {
            return;  // Keep the same state. We do not need to check for masking.
          }
          // We do not record all overriding methods, so we need to walk over all
          // interfaces that could mask the `interface_method`. The provided
          // predicate `slow_is_masked()` does that.
          if (slow_is_masked()) {
            return;  // Found an overriding method that masks `interface_method`.
          }
          break;
        case State::kDefaultConflict:
          return;  // The state cannot change anymore.
        default:
          LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(GetState());
          UNREACHABLE();
      }
      // We have a new default method that's not masked by any other method.
      DCHECK(interface_method->IsDefault());
      if (have_default) {
        SetState(State::kDefaultConflict);
      } else {
        SetMainMethod(interface_method);
        SetState(State::kDefault);
      }
    }
7929 
7930     // Determine if the given `iftable` contains in the given range a subinterface of `iface`
7931     // that declares a method with the same name and signature as 'interface_method'.
7932     //
7933     // Arguments
7934     // - iftable: The iftable we are searching for an overriding method.
7935     // - begin:   The start of the range to search.
7936     // - end:     The end of the range to search.
7937     // - iface:   The interface we are checking to see if anything overrides.
7938     // - interface_method:
7939     //            The interface method providing a name and signature we're searching for.
7940     //
7941     // Returns whether an overriding method was found in any subinterface of `iface`.
ContainsOverridingMethodOf(ObjPtr<mirror::IfTable> iftable,size_t begin,size_t end,ObjPtr<mirror::Class> iface,ArtMethod * interface_method)7942     static bool ContainsOverridingMethodOf(ObjPtr<mirror::IfTable> iftable,
7943                                            size_t begin,
7944                                            size_t end,
7945                                            ObjPtr<mirror::Class> iface,
7946                                            ArtMethod* interface_method)
7947         REQUIRES_SHARED(Locks::mutator_lock_) {
7948       for (size_t i = begin; i != end; ++i) {
7949         ObjPtr<mirror::Class> current_iface = iftable->GetInterface(i);
7950         for (ArtMethod& current_method : current_iface->GetDeclaredVirtualMethods(kPointerSize)) {
7951           if (MethodSignatureEquals(&current_method, interface_method)) {
7952             // Check if the i'th interface is a subtype of this one.
7953             if (current_iface->Implements(iface)) {
7954               return true;
7955             }
7956             break;
7957           }
7958         }
7959       }
7960       return false;
7961     }
7962 
7963     // Determine if the given `iftable` contains in the given range a subinterface of `iface`
7964     // that declares a method implemented by 'target'. This is an optimized version of
7965     // `ContainsOverridingMethodOf()` that searches implementation method arrays instead
7966     // of comparing signatures for declared interface methods.
7967     //
7968     // Arguments
7969     // - iftable: The iftable we are searching for an overriding method.
7970     // - begin:   The start of the range to search.
7971     // - end:     The end of the range to search.
7972     // - iface:   The interface we are checking to see if anything overrides.
7973     // - target:  The implementation method we're searching for.
7974     //            Note that the new `iftable` is filled with vtable indexes for new interfaces,
7975     //            so this needs to be the vtable index if we're searching that range.
7976     //
7977     // Returns whether the `target` was found in a method array for any subinterface of `iface`.
7978     template <typename TargetType>
ContainsImplementingMethod(ObjPtr<mirror::IfTable> iftable,size_t begin,size_t end,ObjPtr<mirror::Class> iface,TargetType target)7979     static bool ContainsImplementingMethod(ObjPtr<mirror::IfTable> iftable,
7980                                            size_t begin,
7981                                            size_t end,
7982                                            ObjPtr<mirror::Class> iface,
7983                                            TargetType target)
7984         REQUIRES_SHARED(Locks::mutator_lock_) {
7985       for (size_t i = begin; i != end; ++i) {
7986         if (MethodArrayContains(iftable->GetMethodArrayOrNull(i), target).first &&
7987             iftable->GetInterface(i)->Implements(iface)) {
7988           return true;
7989         }
7990       }
7991       return false;
7992     }
7993 
7994     template <typename TargetType>
MethodArrayContains(ObjPtr<mirror::PointerArray> method_array,TargetType target)7995     static std::pair<bool, size_t> MethodArrayContains(ObjPtr<mirror::PointerArray> method_array,
7996                                                        TargetType target)
7997         REQUIRES_SHARED(Locks::mutator_lock_) {
7998       size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
7999       for (size_t j = 0; j != num_methods; ++j) {
8000         if (method_array->GetElementPtrSize<TargetType, kPointerSize>(j) == target) {
8001           return {true, j};
8002         }
8003       }
8004       return {false, 0};
8005     }
8006 
    // The method this record was created for; used for hashing and signature
    // equality in `CopiedMethodRecordSet` (see `CopiedMethodRecordHash`/`Equal`).
    ArtMethod* main_method_;
    // Method index for the copied method; used as a vtable index in
    // `ReallocMethods()` (or an interface method index for interfaces).
    uint32_t method_index_;
    // Determines how the copied method is materialized (abstract/miranda,
    // default, or default-conflict) — see the switch in `ReallocMethods()`.
    State state_;
  };
8011 
  // Empty-slot policy for `CopiedMethodRecordSet`: a default-constructed record
  // (with a null main method) marks an unoccupied hash set slot.
  class CopiedMethodRecordEmptyFn {
   public:
    // Reset `item` to the default-constructed (empty) state.
    void MakeEmpty(CopiedMethodRecord& item) const {
      item = CopiedMethodRecord();
    }
    // A record is empty iff its main method is null.
    bool IsEmpty(const CopiedMethodRecord& item) const {
      return item.GetMainMethod() == nullptr;
    }
  };
8021 
  // Hash functor for `CopiedMethodRecordSet`. Hashes via `ComputeMethodHash()`,
  // consistent with the signature-based equality in `CopiedMethodRecordEqual`,
  // and supports heterogeneous lookup by a bare `ArtMethod*`.
  class CopiedMethodRecordHash {
   public:
    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
      DCHECK(method != nullptr);
      return ComputeMethodHash(method);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(const CopiedMethodRecord& record) const NO_THREAD_SAFETY_ANALYSIS {
      // Delegate to the `ArtMethod*` overload using the record's main method.
      return (*this)(record.GetMainMethod());
    }
  };
8035 
  // Equality functor for `CopiedMethodRecordSet`: records compare equal when
  // their main methods have equal signatures (`MethodSignatureEquals()`).
  // Also supports heterogeneous lookup by a bare `ArtMethod*`.
  class CopiedMethodRecordEqual {
   public:
    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    bool operator()(const CopiedMethodRecord& lhs_record,
                    ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
      ArtMethod* lhs = lhs_record.GetMainMethod();
      DCHECK(lhs != nullptr);
      DCHECK(rhs != nullptr);
      return MethodSignatureEquals(lhs, rhs);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    bool operator()(const CopiedMethodRecord& lhs_record,
                    const CopiedMethodRecord& rhs_record) const NO_THREAD_SAFETY_ANALYSIS {
      // Delegate to the `ArtMethod*` overload using the other record's main method.
      return (*this)(lhs_record, rhs_record.GetMainMethod());
    }
  };
8053 
  // Hash set of copied-method records, keyed by method signature.
  using CopiedMethodRecordSet = ScopedArenaHashSet<CopiedMethodRecord,
                                                   CopiedMethodRecordEmptyFn,
                                                   CopiedMethodRecordHash,
                                                   CopiedMethodRecordEqual>;

  // `ArtMethod` layout constants for the pointer size this helper is instantiated for.
  static constexpr size_t kMethodAlignment = ArtMethod::Alignment(kPointerSize);
  static constexpr size_t kMethodSize = ArtMethod::Size(kPointerSize);

  // The class linker performing the method linking.
  ClassLinker* class_linker_;
  // The class whose methods are being linked.
  Handle<mirror::Class> klass_;
  Thread* const self_;
  Runtime* const runtime_;

  // These are allocated on the heap to begin, we then transfer to linear alloc when we re-create
  // the virtual methods array.
  // Need to use low 4GB arenas for compiler or else the pointers wont fit in 32 bit method array
  // during cross compilation.
  // Use the linear alloc pool since this one is in the low 4gb for the compiler.
  ArenaStack stack_;
  ScopedArenaAllocator allocator_;

  // If there are multiple methods with the same signature in the superclass vtable
  // (which can happen with a new virtual method having the same signature as an
  // inaccessible package-private method from another package in the superclass),
  // we keep singly-linked lists in this single array that maps vtable index to the
  // next vtable index in the list, `dex::kDexNoIndex` denotes the end of a list.
  ArrayRef<uint32_t> same_signature_vtable_lists_;

  // Avoid large allocation for a few copied method records.
  // Keep the initial buffer on the stack to avoid arena allocations
  // if there are no special cases (the first arena allocation is costly).
  static constexpr size_t kCopiedMethodRecordInitialBufferSize = 16u;
  CopiedMethodRecord copied_method_records_initial_buffer_[kCopiedMethodRecordInitialBufferSize];
  CopiedMethodRecordSet copied_method_records_;
  // Number of records that shall actually produce a new copied method
  // (records in state `kUseSuperMethod` are excluded); see `ReallocMethods()`.
  size_t num_new_copied_methods_;
};
8090 
// Reallocate the class' method array to make room for the collected copied
// methods (mirandas, copied defaults, default-conflicts), append them, and
// initialize their method indexes and access flags according to each record's
// state. Finally publishes the new array via `UpdateClassMethods()`.
template <PointerSize kPointerSize>
NO_INLINE
void ClassLinker::LinkMethodsHelper<kPointerSize>::ReallocMethods(ObjPtr<mirror::Class> klass) {
  // There should be no thread suspension in this function,
  // native allocations do not cause thread suspension.
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  size_t num_new_copied_methods = num_new_copied_methods_;
  DCHECK_NE(num_new_copied_methods, 0u);
  const size_t old_method_count = klass->NumMethods();
  const size_t new_method_count = old_method_count + num_new_copied_methods;

  // Attempt to realloc to save RAM if possible.
  LengthPrefixedArray<ArtMethod>* old_methods = klass->GetMethodsPtr();
  // The Realloced virtual methods aren't visible from the class roots, so there is no issue
  // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
  // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since
  // CopyFrom has internal read barriers.
  //
  // TODO We should maybe move some of this into mirror::Class or at least into another method.
  const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count,
                                                                      kMethodSize,
                                                                      kMethodAlignment);
  const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
                                                                      kMethodSize,
                                                                      kMethodAlignment);
  // Pass 0 as old size when there was no previous array so `Realloc()` behaves as a plain alloc.
  const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
  LinearAlloc* allocator = class_linker_->GetAllocatorForClassLoader(klass->GetClassLoader());
  auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(allocator->Realloc(
      self_, old_methods, old_methods_ptr_size, new_size, LinearAllocKind::kArtMethodArray));
  CHECK(methods != nullptr);  // Native allocation failure aborts.

  if (methods != old_methods) {
    if (gUseReadBarrier) {
      StrideIterator<ArtMethod> out = methods->begin(kMethodSize, kMethodAlignment);
      // Copy over the old methods. The `ArtMethod::CopyFrom()` is only necessary to not miss
      // read barriers since `LinearAlloc::Realloc()` won't do read barriers when it copies.
      for (auto& m : klass->GetMethods(kPointerSize)) {
        out->CopyFrom(&m, kPointerSize);
        ++out;
      }
    } else if (gUseUserfaultfd) {
      // In order to make compaction code skip updating the declaring_class_ in
      // old_methods, convert it into a 'no GC-root' array.
      allocator->ConvertToNoGcRoots(old_methods, LinearAllocKind::kArtMethodArray);
    }
  }

  // Collect and sort copied method records by the vtable index. This places overriding
  // copied methods first, sorted by the vtable index already assigned in the superclass,
  // followed by copied methods with new signatures in the order in which we encountered
  // them when going over virtual methods of new interfaces.
  // This order is deterministic but implementation-defined.
  //
  // Avoid arena allocation for a few records (the first arena allocation is costly).
  constexpr size_t kSortedRecordsBufferSize = 16;
  CopiedMethodRecord* sorted_records_buffer[kSortedRecordsBufferSize];
  CopiedMethodRecord** sorted_records = (num_new_copied_methods <= kSortedRecordsBufferSize)
      ? sorted_records_buffer
      : allocator_.AllocArray<CopiedMethodRecord*>(num_new_copied_methods);
  size_t filled_sorted_records = 0u;
  for (CopiedMethodRecord& record : copied_method_records_) {
    // Records in `kUseSuperMethod` state do not produce a new copied method.
    if (record.GetState() != CopiedMethodRecord::State::kUseSuperMethod) {
      DCHECK_LT(filled_sorted_records, num_new_copied_methods);
      sorted_records[filled_sorted_records] = &record;
      ++filled_sorted_records;
    }
  }
  DCHECK_EQ(filled_sorted_records, num_new_copied_methods);
  std::sort(sorted_records,
            sorted_records + num_new_copied_methods,
            [](const CopiedMethodRecord* lhs, const CopiedMethodRecord* rhs) {
              return lhs->GetMethodIndex() < rhs->GetMethodIndex();
            });

  if (klass->IsInterface()) {
    // Some records may have been pruned. Update method indexes in collected records.
    size_t interface_method_index = klass->NumDeclaredVirtualMethods();
    for (size_t i = 0; i != num_new_copied_methods; ++i) {
      CopiedMethodRecord* record = sorted_records[i];
      DCHECK_LE(interface_method_index, record->GetMethodIndex());
      record->SetMethodIndex(interface_method_index);
      ++interface_method_index;
    }
  }

  // Add copied methods.
  methods->SetSize(new_method_count);
  for (size_t i = 0; i != num_new_copied_methods; ++i) {
    const CopiedMethodRecord* record = sorted_records[i];
    ArtMethod* interface_method = record->GetMainMethod();
    DCHECK(!interface_method->IsCopied());
    ArtMethod& new_method = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
    new_method.CopyFrom(interface_method, kPointerSize);
    new_method.SetMethodIndex(dchecked_integral_cast<uint16_t>(record->GetMethodIndex()));
    switch (record->GetState()) {
      case CopiedMethodRecord::State::kAbstractSingle:
      case CopiedMethodRecord::State::kAbstract: {
        DCHECK(!klass->IsInterface());  // We do not create miranda methods for interfaces.
        uint32_t access_flags = new_method.GetAccessFlags();
        DCHECK_EQ(access_flags & (kAccAbstract | kAccIntrinsic | kAccDefault), kAccAbstract)
            << "Miranda method should be abstract but not intrinsic or default!";
        new_method.SetAccessFlags(access_flags | kAccCopied);
        break;
      }
      case CopiedMethodRecord::State::kDefaultSingle:
      case CopiedMethodRecord::State::kDefault: {
        DCHECK(!klass->IsInterface());  // We do not copy default methods for interfaces.
        // Clear the kAccSkipAccessChecks flag if it is present. Since this class hasn't been
        // verified yet it shouldn't have methods that are skipping access checks.
        // TODO This is rather arbitrary. We should maybe support classes where only some of its
        // methods are skip_access_checks.
        DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
        static_assert((kAccDefault & kAccIntrinsicBits) != 0);
        DCHECK(!new_method.IsIntrinsic()) << "Adding kAccDefault to an intrinsic would be a "
                                          << "mistake as it overlaps with kAccIntrinsicBits.";
        constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
        constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
        new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
        break;
      }
      case CopiedMethodRecord::State::kDefaultConflict: {
        // This is a type of default method (there are default method impls, just a conflict)
        // so mark this as a default. We use the `kAccAbstract` flag to distinguish it from
        // invokable copied default method without using a separate access flag but the default
        // conflicting method is technically not abstract and ArtMethod::IsAbstract() shall
        // return false. Also clear the kAccSkipAccessChecks bit since this class hasn't been
        // verified yet it shouldn't have methods that are skipping access checks. Also clear
        // potential kAccSingleImplementation to avoid CHA trying to inline the default method.
        uint32_t access_flags = new_method.GetAccessFlags();
        DCHECK_EQ(access_flags & (kAccNative | kAccIntrinsic), 0u);
        constexpr uint32_t kSetFlags = kAccDefault | kAccAbstract | kAccCopied;
        static_assert((kAccDefault & kAccIntrinsicBits) != 0);
        DCHECK(!new_method.IsIntrinsic()) << "Adding kAccDefault to an intrinsic would be a "
                                          << "mistake as it overlaps with kAccIntrinsicBits.";
        constexpr uint32_t kMaskFlags = ~(kAccSkipAccessChecks | kAccSingleImplementation);
        new_method.SetAccessFlags((access_flags | kSetFlags) & kMaskFlags);
        new_method.SetDataPtrSize(nullptr, kPointerSize);
        DCHECK(new_method.IsDefaultConflicting());
        DCHECK(!new_method.IsAbstract());
        // The actual method might or might not be marked abstract since we just copied it from
        // a (possibly default) interface method. We need to set its entry point to be the bridge
        // so that the compiler will not invoke the implementation of whatever method we copied
        // from.
        EnsureThrowsInvocationError(class_linker_, &new_method);
        break;
      }
      default:
        LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(record->GetState());
        UNREACHABLE();
    }
  }

  if (VLOG_IS_ON(class_linker)) {
    LogNewVirtuals(methods);
  }

  class_linker_->UpdateClassMethods(klass, methods);
}
8250 
// Finalize the interface table: propagate overriding implementations into
// inherited method arrays (copy-on-write), translate vtable indexes stored in
// new interfaces' method arrays to actual methods, and update the IMT entries
// (`out_imt`) for non-abstract classes. Returns false on allocation failure
// (OOM exception pending).
template <PointerSize kPointerSize>
bool ClassLinker::LinkMethodsHelper<kPointerSize>::FinalizeIfTable(
    Handle<mirror::Class> klass,
    MutableHandle<mirror::IfTable> iftable,
    Handle<mirror::PointerArray> vtable,
    bool is_klass_abstract,
    bool is_super_abstract,
    bool* out_new_conflict,
    ArtMethod** out_imt) {
  size_t ifcount = iftable->Count();
  // We do not need a read barrier here as the length is constant, both from-space and
  // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
  size_t super_ifcount =
      klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();

  ClassLinker* class_linker = nullptr;
  ArtMethod* unimplemented_method = nullptr;
  ArtMethod* imt_conflict_method = nullptr;
  // `[imt_methods_begin, imt_methods_begin + imt_methods_size)` describes the address
  // range of methods that require an IMT update; see the range check below.
  uintptr_t imt_methods_begin = 0u;
  size_t imt_methods_size = 0u;
  DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
  DCHECK_EQ(klass->GetSuperClass()->ShouldHaveImt(), !is_super_abstract);
  if (!is_klass_abstract) {
    class_linker = class_linker_;
    unimplemented_method = runtime_->GetImtUnimplementedMethod();
    imt_conflict_method = runtime_->GetImtConflictMethod();
    if (is_super_abstract) {
      // There was no IMT in superclass to copy to `out_imt[]`, so we need
      // to fill it with all implementation methods from superclass.
      DCHECK_EQ(imt_methods_begin, 0u);
      imt_methods_size = std::numeric_limits<size_t>::max();  // No method at the last byte.
    } else {
      // If the superclass has IMT, we have already copied it to `out_imt[]` and
      // we do not need to call `SetIMTRef()` for interfaces from superclass when
      // the implementation method is already in the superclass, only for new methods.
      // For simplicity, use the entire method array including direct methods.
      LengthPrefixedArray<ArtMethod>* const new_methods = klass->GetMethodsPtr();
      if (new_methods != nullptr) {
        DCHECK_NE(new_methods->size(), 0u);
        imt_methods_begin =
            reinterpret_cast<uintptr_t>(&new_methods->At(0, kMethodSize, kMethodAlignment));
        imt_methods_size = new_methods->size() * kMethodSize;
      }
    }
  }

  auto update_imt = [=](ObjPtr<mirror::Class> iface, size_t j, ArtMethod* implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // Place method in imt if entry is empty, place conflict otherwise.
    ArtMethod** imt_ptr = &out_imt[iface->GetVirtualMethod(j, kPointerSize)->GetImtIndex()];
    class_linker->SetIMTRef(unimplemented_method,
                            imt_conflict_method,
                            implementation,
                            /*out*/out_new_conflict,
                            /*out*/imt_ptr);
  };

  // For interfaces inherited from superclass, the new method arrays are empty,
  // so use vtable indexes from implementation methods from the superclass method array.
  for (size_t i = 0; i != super_ifcount; ++i) {
    ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
    DCHECK(method_array == klass->GetSuperClass()->GetIfTable()->GetMethodArrayOrNull(i));
    if (method_array == nullptr) {
      continue;
    }
    size_t num_methods = method_array->GetLength();
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    size_t j = 0;
    // First loop has method array shared with the super class.
    for (; j != num_methods; ++j) {
      ArtMethod* super_implementation =
          method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
      size_t vtable_index = super_implementation->GetMethodIndex();
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      // Check if we need to update IMT with this method, see above.
      // (Unsigned subtraction makes this a single range check against
      // `[imt_methods_begin, imt_methods_begin + imt_methods_size)`.)
      if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
        update_imt(iface, j, implementation);
      }
      if (implementation != super_implementation) {
        // Copy-on-write and move to the next loop.
        Thread* self = self_;
        StackHandleScope<2u> hs(self);
        Handle<mirror::PointerArray> old_method_array = hs.NewHandle(method_array);
        HandleWrapperObjPtr<mirror::Class> h_iface = hs.NewHandleWrapper(&iface);
        // If we are still sharing the entire `IfTable` with the superclass,
        // we must copy it before modifying any of its method arrays.
        if (ifcount == super_ifcount && iftable.Get() == klass->GetSuperClass()->GetIfTable()) {
          ObjPtr<mirror::IfTable> new_iftable = ObjPtr<mirror::IfTable>::DownCast(
              mirror::ObjectArray<mirror::Object>::CopyOf(
                  iftable, self, ifcount * mirror::IfTable::kMax));
          if (new_iftable == nullptr) {
            return false;
          }
          iftable.Assign(new_iftable);
        }
        method_array = ObjPtr<mirror::PointerArray>::DownCast(
            mirror::Array::CopyOf(old_method_array, self, num_methods));
        if (method_array == nullptr) {
          return false;
        }
        iftable->SetMethodArray(i, method_array);
        method_array->SetElementPtrSize(j, implementation, kPointerSize);
        ++j;
        break;
      }
    }
    // Second loop (if non-empty) has method array different from the superclass.
    for (; j != num_methods; ++j) {
      ArtMethod* super_implementation =
          method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
      size_t vtable_index = super_implementation->GetMethodIndex();
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      method_array->SetElementPtrSize(j, implementation, kPointerSize);
      // Check if we need to update IMT with this method, see above.
      if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
        update_imt(iface, j, implementation);
      }
    }
  }

  // New interface method arrays contain vtable indexes. Translate them to methods.
  DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
  for (size_t i = super_ifcount; i != ifcount; ++i) {
    ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
    if (method_array == nullptr) {
      continue;
    }
    size_t num_methods = method_array->GetLength();
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    for (size_t j = 0; j != num_methods; ++j) {
      size_t vtable_index = method_array->GetElementPtrSize<size_t, kPointerSize>(j);
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      method_array->SetElementPtrSize(j, implementation, kPointerSize);
      if (!is_klass_abstract) {
        update_imt(iface, j, implementation);
      }
    }
  }

  return true;
}
8393 
8394 template <PointerSize kPointerSize>
AllocPointerArray(Thread * self,size_t length)8395 ObjPtr<mirror::PointerArray> ClassLinker::LinkMethodsHelper<kPointerSize>::AllocPointerArray(
8396     Thread* self, size_t length) {
8397   using PointerArrayType = std::conditional_t<
8398       kPointerSize == PointerSize::k64, mirror::LongArray, mirror::IntArray>;
8399   ObjPtr<mirror::Array> array = PointerArrayType::Alloc(self, length);
8400   return ObjPtr<mirror::PointerArray>::DownCast(array);
8401 }
8402 
8403 template <PointerSize kPointerSize>
AllocateIfTableMethodArrays(Thread * self,Handle<mirror::Class> klass,Handle<mirror::IfTable> iftable)8404 bool ClassLinker::LinkMethodsHelper<kPointerSize>::AllocateIfTableMethodArrays(
8405     Thread* self,
8406     Handle<mirror::Class> klass,
8407     Handle<mirror::IfTable> iftable) {
8408   DCHECK(!klass->IsInterface());
8409   DCHECK(klass_->HasSuperClass());
8410   const size_t ifcount = iftable->Count();
8411   // We do not need a read barrier here as the length is constant, both from-space and
8412   // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
8413   size_t super_ifcount =
8414       klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();
8415   if (ifcount == super_ifcount) {
8416     DCHECK(iftable.Get() == klass_->GetSuperClass()->GetIfTable());
8417     return true;
8418   }
8419 
8420   if (kIsDebugBuild) {
8421     // The method array references for superclass interfaces have been copied.
8422     // We shall allocate new arrays if needed (copy-on-write) in `FinalizeIfTable()`.
8423     ObjPtr<mirror::IfTable> super_iftable = klass_->GetSuperClass()->GetIfTable();
8424     for (size_t i = 0; i != super_ifcount; ++i) {
8425       CHECK(iftable->GetInterface(i) == super_iftable->GetInterface(i));
8426       CHECK(iftable->GetMethodArrayOrNull(i) == super_iftable->GetMethodArrayOrNull(i));
8427     }
8428   }
8429 
8430   for (size_t i = super_ifcount; i < ifcount; ++i) {
8431     size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
8432     if (num_methods > 0) {
8433       ObjPtr<mirror::PointerArray> method_array = AllocPointerArray(self, num_methods);
8434       if (UNLIKELY(method_array == nullptr)) {
8435         self->AssertPendingOOMException();
8436         return false;
8437       }
8438       iftable->SetMethodArray(i, method_array);
8439     }
8440   }
8441   return true;
8442 }
8443 
8444 template <PointerSize kPointerSize>
AssignVTableIndexes(ObjPtr<mirror::Class> klass,ObjPtr<mirror::Class> super_class,bool is_super_abstract,size_t num_virtual_methods,ObjPtr<mirror::IfTable> iftable)8445 size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
8446     ObjPtr<mirror::Class> klass,
8447     ObjPtr<mirror::Class> super_class,
8448     bool is_super_abstract,
8449     size_t num_virtual_methods,
8450     ObjPtr<mirror::IfTable> iftable) {
8451   DCHECK(!klass->IsInterface());
8452   DCHECK(klass->HasSuperClass());
8453   DCHECK(klass->GetSuperClass() == super_class);
8454 
8455   // There should be no thread suspension unless we want to throw an exception.
8456   // (We are using `ObjPtr<>` and raw vtable pointers that are invalidated by thread suspension.)
8457   std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);
8458 
8459   // Prepare a hash table with virtual methods from the superclass.
8460   // For the unlikely cases that there are multiple methods with the same signature
8461   // but different vtable indexes, keep an array with indexes of the previous
8462   // methods with the same signature (walked as singly-linked lists).
8463   uint8_t* raw_super_vtable;
8464   size_t super_vtable_length;
8465   if (is_super_abstract) {
8466     DCHECK(!super_class->ShouldHaveEmbeddedVTable());
8467     ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTableDuringLinking();
8468     DCHECK(super_vtable != nullptr);
8469     raw_super_vtable = reinterpret_cast<uint8_t*>(super_vtable.Ptr()) +
8470                        mirror::Array::DataOffset(static_cast<size_t>(kPointerSize)).Uint32Value();
8471     super_vtable_length = super_vtable->GetLength();
8472   } else {
8473     DCHECK(super_class->ShouldHaveEmbeddedVTable());
8474     raw_super_vtable = reinterpret_cast<uint8_t*>(super_class.Ptr()) +
8475                        mirror::Class::EmbeddedVTableOffset(kPointerSize).Uint32Value();
8476     super_vtable_length = super_class->GetEmbeddedVTableLength();
8477   }
8478   VTableAccessor super_vtable_accessor(raw_super_vtable, super_vtable_length);
8479   static constexpr double kMinLoadFactor = 0.3;
8480   static constexpr double kMaxLoadFactor = 0.5;
8481   static constexpr size_t kMaxStackBuferSize = 256;
8482   const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
8483   const size_t super_vtable_buffer_size = super_vtable_length * 3;
8484   const size_t bit_vector_size = BitVector::BitsToWords(num_virtual_methods);
8485   const size_t total_size =
8486       declared_virtuals_buffer_size + super_vtable_buffer_size + bit_vector_size;
8487 
8488   uint32_t* declared_virtuals_buffer_ptr = (total_size <= kMaxStackBuferSize)
8489       ? reinterpret_cast<uint32_t*>(alloca(total_size * sizeof(uint32_t)))
8490       : allocator_.AllocArray<uint32_t>(total_size);
8491   uint32_t* bit_vector_buffer_ptr = declared_virtuals_buffer_ptr + declared_virtuals_buffer_size;
8492 
8493   DeclaredVirtualSignatureSet declared_virtual_signatures(
8494       kMinLoadFactor,
8495       kMaxLoadFactor,
8496       DeclaredVirtualSignatureHash(klass),
8497       DeclaredVirtualSignatureEqual(klass),
8498       declared_virtuals_buffer_ptr,
8499       declared_virtuals_buffer_size,
8500       allocator_.Adapter());
8501 
8502   ArrayRef<uint32_t> same_signature_vtable_lists;
8503   const bool is_proxy_class = klass->IsProxyClass();
8504   size_t vtable_length = super_vtable_length;
8505 
8506   // Record which declared methods are overriding a super method.
8507   BitVector initialized_methods(/* expandable= */ false,
8508                                 Allocator::GetNoopAllocator(),
8509                                 bit_vector_size,
8510                                 bit_vector_buffer_ptr);
8511 
8512   // Note: our sets hash on the method name, and therefore we pay a high
8513   // performance price when a class has many overloads.
8514   //
8515   // We populate a set of declared signatures instead of signatures from the
8516   // super vtable (which is only lazy populated in case of interface overriding,
8517   // see below). This makes sure that we pay the performance price only on that
8518   // class, and not on its subclasses (except in the case of interface overriding, see below).
8519   for (size_t i = 0; i != num_virtual_methods; ++i) {
8520     ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8521     DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
8522     ArtMethod* signature_method = UNLIKELY(is_proxy_class)
8523         ? virtual_method->GetInterfaceMethodForProxyUnchecked(kPointerSize)
8524         : virtual_method;
8525     size_t hash = ComputeMethodHash(signature_method);
8526     declared_virtual_signatures.PutWithHash(i, hash);
8527   }
8528 
8529   // Loop through each super vtable method and see if they are overridden by a method we added to
8530   // the hash table.
8531   for (size_t j = 0; j < super_vtable_length; ++j) {
8532     // Search the hash table to see if we are overridden by any method.
8533     ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(j);
8534     if (!klass->CanAccessMember(super_method->GetDeclaringClass(),
8535                                 super_method->GetAccessFlags())) {
8536       // Continue on to the next method since this one is package private and cannot be overridden.
8537       // Before Android 4.1, the package-private method super_method might have been incorrectly
8538       // overridden.
8539       continue;
8540     }
8541     size_t hash = (j < mirror::Object::kVTableLength)
8542         ? class_linker_->object_virtual_method_hashes_[j]
8543         : ComputeMethodHash(super_method);
8544     auto it = declared_virtual_signatures.FindWithHash(super_method, hash);
8545     if (it == declared_virtual_signatures.end()) {
8546       continue;
8547     }
8548     ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(*it, kPointerSize);
8549     if (super_method->IsFinal()) {
8550       sants.reset();
8551       ThrowLinkageError(klass, "Method %s overrides final method in class %s",
8552                         virtual_method->PrettyMethod().c_str(),
8553                         super_method->GetDeclaringClassDescriptor());
8554       return 0u;
8555     }
8556     if (initialized_methods.IsBitSet(*it)) {
8557       // The method is overriding more than one method.
8558       // We record that information in a linked list to later set the method in the vtable
8559       // locations that are not the method index.
8560       if (same_signature_vtable_lists.empty()) {
8561         same_signature_vtable_lists = ArrayRef<uint32_t>(
8562             allocator_.AllocArray<uint32_t>(super_vtable_length), super_vtable_length);
8563         std::fill_n(same_signature_vtable_lists.data(), super_vtable_length, dex::kDexNoIndex);
8564         same_signature_vtable_lists_ = same_signature_vtable_lists;
8565       }
8566       same_signature_vtable_lists[j] = virtual_method->GetMethodIndexDuringLinking();
8567     } else {
8568       initialized_methods.SetBit(*it);
8569     }
8570 
8571     // We arbitrarily set to the largest index. This is also expected when
8572     // iterating over the `same_signature_vtable_lists_`.
8573     virtual_method->SetMethodIndex(j);
8574   }
8575 
8576   // Add the non-overridden methods at the end.
8577   for (size_t i = 0; i < num_virtual_methods; ++i) {
8578     if (!initialized_methods.IsBitSet(i)) {
8579       ArtMethod* local_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8580       local_method->SetMethodIndex(vtable_length);
8581       vtable_length++;
8582     }
8583   }
8584 
8585   // A lazily constructed super vtable set, which we only populate in the less
8586   // common situation of a superclass implementing a method declared in an
8587   // interface this class inherits.
8588   // We still try to allocate the set on the stack as using the arena will have
8589   // a larger cost.
8590   uint32_t* super_vtable_buffer_ptr = bit_vector_buffer_ptr + bit_vector_size;
8591   VTableSignatureSet super_vtable_signatures(
8592       kMinLoadFactor,
8593       kMaxLoadFactor,
8594       VTableSignatureHash(super_vtable_accessor),
8595       VTableSignatureEqual(super_vtable_accessor),
8596       super_vtable_buffer_ptr,
8597       super_vtable_buffer_size,
8598       allocator_.Adapter());
8599 
8600   // Assign vtable indexes for interface methods in new interfaces and store them
8601   // in implementation method arrays. These shall be replaced by actual method
8602   // pointers later. We do not need to do this for superclass interfaces as we can
8603   // get these vtable indexes from implementation methods in superclass iftable.
8604   // Record data for copied methods which shall be referenced by the vtable.
8605   const size_t ifcount = iftable->Count();
8606   ObjPtr<mirror::IfTable> super_iftable = super_class->GetIfTable();
8607   const size_t super_ifcount = super_iftable->Count();
8608   for (size_t i = ifcount; i != super_ifcount; ) {
8609     --i;
8610     DCHECK_LT(i, ifcount);
8611     ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8612     ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
8613     size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
8614     for (size_t j = 0; j != num_methods; ++j) {
8615       ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
8616       size_t hash = ComputeMethodHash(interface_method);
8617       ArtMethod* vtable_method = nullptr;
8618       auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
8619       if (it1 != declared_virtual_signatures.end()) {
8620         ArtMethod* found_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
8621         // For interface overriding, we only look at public methods.
8622         if (found_method->IsPublic()) {
8623           vtable_method = found_method;
8624         }
8625       } else {
8626         // This situation should be rare (a superclass implements a method
8627         // declared in an interface this class is inheriting). Only in this case
8628         // do we lazily populate the super_vtable_signatures.
8629         if (super_vtable_signatures.empty()) {
8630           for (size_t k = 0; k < super_vtable_length; ++k) {
8631             ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(k);
8632             if (!super_method->IsPublic()) {
8633               // For interface overriding, we only look at public methods.
8634               continue;
8635             }
8636             size_t super_hash = (k < mirror::Object::kVTableLength)
8637                 ? class_linker_->object_virtual_method_hashes_[k]
8638                 : ComputeMethodHash(super_method);
8639             auto [it, inserted] = super_vtable_signatures.InsertWithHash(k, super_hash);
8640             DCHECK(inserted || super_vtable_accessor.GetVTableEntry(*it) == super_method);
8641           }
8642         }
8643         auto it2 = super_vtable_signatures.FindWithHash(interface_method, hash);
8644         if (it2 != super_vtable_signatures.end()) {
8645           vtable_method = super_vtable_accessor.GetVTableEntry(*it2);
8646         }
8647       }
8648 
8649       uint32_t vtable_index = vtable_length;
8650       if (vtable_method != nullptr) {
8651         vtable_index = vtable_method->GetMethodIndexDuringLinking();
8652         if (!vtable_method->IsOverridableByDefaultMethod()) {
8653           method_array->SetElementPtrSize(j, vtable_index, kPointerSize);
8654           continue;
8655         }
8656       }
8657 
8658       auto [it, inserted] = copied_method_records_.InsertWithHash(
8659           CopiedMethodRecord(interface_method, vtable_index), hash);
8660       if (vtable_method != nullptr) {
8661         DCHECK_EQ(vtable_index, it->GetMethodIndex());
8662       } else if (inserted) {
8663         DCHECK_EQ(vtable_index, it->GetMethodIndex());
8664         DCHECK_EQ(vtable_index, vtable_length);
8665         ++vtable_length;
8666       } else {
8667         vtable_index = it->GetMethodIndex();
8668       }
8669       method_array->SetElementPtrSize(j, it->GetMethodIndex(), kPointerSize);
8670       if (inserted) {
8671         it->SetState(interface_method->IsAbstract() ? CopiedMethodRecord::State::kAbstractSingle
8672                                                     : CopiedMethodRecord::State::kDefaultSingle);
8673       } else {
8674         it->UpdateState(iface, interface_method, vtable_index, iftable, ifcount, i);
8675       }
8676     }
8677   }
8678   // Finalize copied method records and check if we can reuse some methods from superclass vtable.
8679   size_t num_new_copied_methods = copied_method_records_.size();
8680   for (CopiedMethodRecord& record : copied_method_records_) {
8681     uint32_t vtable_index = record.GetMethodIndex();
8682     if (vtable_index < super_vtable_length) {
8683       ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(record.GetMethodIndex());
8684       DCHECK(super_method->IsOverridableByDefaultMethod());
8685       record.FinalizeState(
8686           super_method, vtable_index, iftable, ifcount, super_iftable, super_ifcount);
8687       if (record.GetState() == CopiedMethodRecord::State::kUseSuperMethod) {
8688         --num_new_copied_methods;
8689       }
8690     }
8691   }
8692   num_new_copied_methods_ = num_new_copied_methods;
8693 
8694   if (UNLIKELY(!IsUint<16>(vtable_length))) {
8695     sants.reset();
8696     ThrowClassFormatError(klass, "Too many methods defined on class: %zd", vtable_length);
8697     return 0u;
8698   }
8699 
8700   return vtable_length;
8701 }
8702 
// For an interface `klass` (which directly extends `java.lang.Object`), find default
// methods inherited from superinterfaces in `iftable` that need copied-method records.
// Only default-method *conflicts* are kept for copying; non-conflicting inherited
// defaults are pruned at the end (see the final loop) and are instead found lazily
// via `Class::FindVirtualMethodForInterfaceSuper()` when needed.
// Records the number of methods to copy in `num_new_copied_methods_` and returns true.
template <PointerSize kPointerSize>
bool ClassLinker::LinkMethodsHelper<kPointerSize>::FindCopiedMethodsForInterface(
    ObjPtr<mirror::Class> klass,
    size_t num_virtual_methods,
    ObjPtr<mirror::IfTable> iftable) {
  // Preconditions: only interfaces whose superclass is `Object` reach this helper.
  DCHECK(klass->IsInterface());
  DCHECK(klass->HasSuperClass());
  DCHECK(klass->GetSuperClass()->IsObjectClass());
  DCHECK_EQ(klass->GetSuperClass()->GetIfTableCount(), 0);

  // There should be no thread suspension unless we want to throw an exception.
  // (We are using `ObjPtr<>`s that are invalidated by thread suspension.)
  std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);

  // Prepare a `HashSet<>` with the declared virtual methods. These mask any methods
  // from superinterfaces, so we can filter out matching superinterface methods.
  static constexpr double kMinLoadFactor = 0.3;
  static constexpr double kMaxLoadFactor = 0.5;
  static constexpr size_t kMaxStackBuferSize = 256;
  // 3x the method count keeps the set under `kMaxLoadFactor`. Small buffers are
  // allocated on the stack; larger ones come from the linear allocator.
  const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
  uint32_t* declared_virtuals_buffer_ptr = (declared_virtuals_buffer_size <= kMaxStackBuferSize)
      ? reinterpret_cast<uint32_t*>(alloca(declared_virtuals_buffer_size * sizeof(uint32_t)))
      : allocator_.AllocArray<uint32_t>(declared_virtuals_buffer_size);
  DeclaredVirtualSignatureSet declared_virtual_signatures(
      kMinLoadFactor,
      kMaxLoadFactor,
      DeclaredVirtualSignatureHash(klass),
      DeclaredVirtualSignatureEqual(klass),
      declared_virtuals_buffer_ptr,
      declared_virtuals_buffer_size,
      allocator_.Adapter());
  // The set stores indexes of declared virtual methods, hashed by method signature.
  for (size_t i = 0; i != num_virtual_methods; ++i) {
    ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
    DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
    size_t hash = ComputeMethodHash(virtual_method);
    declared_virtual_signatures.PutWithHash(i, hash);
  }

  // We do not create miranda methods for interface classes, so we do not need to track
  // non-default (abstract) interface methods. The downside is that we cannot use the
  // optimized code paths with `CopiedMethodRecord::State::kDefaultSingle` and since
  // we do not fill method arrays for interfaces, the method search actually has to
  // compare signatures instead of searching for the implementing method.
  const size_t ifcount = iftable->Count();
  size_t new_method_index = num_virtual_methods;
  // Walk interfaces back-to-front so that the most-derived interface's view of each
  // default method is processed first.
  for (size_t i = ifcount; i != 0u; ) {
    --i;
    DCHECK_LT(i, ifcount);
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    if (!iface->HasDefaultMethods()) {
      continue;  // No default methods to process.
    }
    size_t num_methods = iface->NumDeclaredVirtualMethods();
    for (size_t j = 0; j != num_methods; ++j) {
      ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
      if (!interface_method->IsDefault()) {
        continue;  // Do not process this non-default method.
      }
      size_t hash = ComputeMethodHash(interface_method);
      auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
      if (it1 != declared_virtual_signatures.end()) {
        // Virtual methods in interfaces are always public.
        // This is checked by the `DexFileVerifier`.
        DCHECK(klass->GetVirtualMethodDuringLinking(*it1, kPointerSize)->IsPublic());
        continue;  // This default method is masked by a method declared in this interface.
      }

      CopiedMethodRecord new_record(interface_method, new_method_index);
      auto it = copied_method_records_.FindWithHash(new_record, hash);
      if (it == copied_method_records_.end()) {
        // Pretend that there is another default method and try to update the state.
        // If the `interface_method` is not masked, the state shall change to
        // `kDefaultConflict`; if it is masked, the state remains `kDefault`.
        new_record.SetState(CopiedMethodRecord::State::kDefault);
        new_record.UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
        if (new_record.GetState() == CopiedMethodRecord::State::kDefaultConflict) {
          // Insert the new record with the state `kDefault`.
          new_record.SetState(CopiedMethodRecord::State::kDefault);
          copied_method_records_.PutWithHash(new_record, hash);
          DCHECK_EQ(new_method_index, new_record.GetMethodIndex());
          ++new_method_index;
        }
      } else {
        // An earlier (more derived) interface already produced a record for this
        // signature; merge this interface's view into its state.
        it->UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
      }
    }
  }

  // Prune records without conflict. (Method indexes are updated in `ReallocMethods()`.)
  // We do not copy normal default methods to subinterfaces, instead we find the
  // default method with `Class::FindVirtualMethodForInterfaceSuper()` when needed.
  size_t num_new_copied_methods = copied_method_records_.size();
  for (CopiedMethodRecord& record : copied_method_records_) {
    if (record.GetState() != CopiedMethodRecord::State::kDefaultConflict) {
      DCHECK(record.GetState() == CopiedMethodRecord::State::kDefault);
      record.SetState(CopiedMethodRecord::State::kUseSuperMethod);
      --num_new_copied_methods;
    }
  }
  num_new_copied_methods_ = num_new_copied_methods;

  return true;
}
8806 
8807 
// Link the virtual and interface methods of `klass`: build its vtable (except for
// interfaces, which have none) and fill the method arrays of its iftable, and for
// instantiable classes populate the IMT via `out_imt` / `out_new_conflict`.
// Dispatches on the kind of class:
//   1. interfaces — assign iftable indexes, mark default methods, copy conflicts;
//   2. classes with a superclass (common case) — merge the super vtable with new
//      virtuals and interface methods;
//   3. `java.lang.Object` itself — see `LinkJavaLangObjectMethods()`.
// Returns false with a pending exception on failure.
template <PointerSize kPointerSize>
FLATTEN
bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkMethods(
    Thread* self,
    Handle<mirror::Class> klass,
    Handle<mirror::ObjectArray<mirror::Class>> interfaces,
    bool* out_new_conflict,
    ArtMethod** out_imt) {
  const size_t num_virtual_methods = klass->NumVirtualMethods();
  if (klass->IsInterface()) {
    // No vtable.
    if (!IsUint<16>(num_virtual_methods)) {
      ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
      return false;
    }
    // Assign each method an interface table index and set the default flag.
    bool has_defaults = false;
    for (size_t i = 0; i < num_virtual_methods; ++i) {
      ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
      m->SetMethodIndex(i);
      uint32_t access_flags = m->GetAccessFlags();
      DCHECK(!ArtMethod::IsDefault(access_flags));
      DCHECK_EQ(!ArtMethod::IsAbstract(access_flags), ArtMethod::IsInvokable(access_flags));
      if (ArtMethod::IsInvokable(access_flags)) {
        // If the dex file does not support default methods, throw ClassFormatError.
        // This check is necessary to protect from odd cases, such as native default
        // methods, that the dex file verifier permits for old dex file versions. b/157170505
        // FIXME: This should be `if (!m->GetDexFile()->SupportsDefaultMethods())` but we're
        // currently running CTS tests for default methods with dex file version 035 which
        // does not support default methods. So, we limit this to native methods. b/157718952
        if (ArtMethod::IsNative(access_flags)) {
          DCHECK(!m->GetDexFile()->SupportsDefaultMethods());
          ThrowClassFormatError(klass.Get(),
                                "Dex file does not support default method '%s'",
                                m->PrettyMethod().c_str());
          return false;
        }
        if (!ArtMethod::IsPublic(access_flags)) {
          // The verifier should have caught the non-public method for dex version 37.
          // Just warn and skip it since this is from before default-methods so we don't
          // really need to care that it has code.
          LOG(WARNING) << "Default interface method " << m->PrettyMethod() << " is not public! "
                       << "This will be a fatal error in subsequent versions of android. "
                       << "Continuing anyway.";
        }
        static_assert((kAccDefault & kAccIntrinsicBits) != 0);
        DCHECK(!m->IsIntrinsic()) << "Adding kAccDefault to an intrinsic would be a mistake as it "
                                  << "overlaps with kAccIntrinsicBits.";
        m->SetAccessFlags(access_flags | kAccDefault);
        has_defaults = true;
      }
    }
    // Mark that we have default methods so that we won't need to scan the virtual_methods_ array
    // during initialization. This is a performance optimization. We could simply traverse the
    // virtual_methods_ array again during initialization.
    if (has_defaults) {
      klass->SetHasDefaultMethods();
    }
    ObjPtr<mirror::IfTable> iftable = SetupInterfaceLookupTable(
        self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass));
    if (UNLIKELY(iftable == nullptr)) {
      self->AssertPendingException();
      return false;
    }
    size_t ifcount = iftable->Count();
    bool have_super_with_defaults = false;
    for (size_t i = 0; i != ifcount; ++i) {
      if (iftable->GetInterface(i)->HasDefaultMethods()) {
        have_super_with_defaults = true;
        break;
      }
    }
    LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
    if (have_super_with_defaults) {
      // Inherited default methods may conflict; collect records for the conflicts
      // and copy those methods into this interface.
      if (!FindCopiedMethodsForInterface(klass.Get(), num_virtual_methods, iftable)) {
        self->AssertPendingException();
        return false;
      }
      if (num_new_copied_methods_ != 0u) {
        // Re-check the number of methods.
        size_t final_num_virtual_methods = num_virtual_methods + num_new_copied_methods_;
        if (!IsUint<16>(final_num_virtual_methods)) {
          ThrowClassFormatError(
              klass.Get(), "Too many methods on interface: %zu", final_num_virtual_methods);
          return false;
        }
        ReallocMethods(klass.Get());
      }
    }
    klass->SetIfTable(iftable);
    if (kIsDebugBuild) {
      // May cause thread suspension, so do this after we're done with `ObjPtr<> iftable`.
      ClobberOldMethods(old_methods, klass->GetMethodsPtr());
    }
    return true;
  } else if (LIKELY(klass->HasSuperClass())) {
    // We set up the interface lookup table now because we need it to determine if we need
    // to update any vtable entries with new default method implementations.
    StackHandleScope<3> hs(self);
    MutableHandle<mirror::IfTable> iftable = hs.NewHandle(UNLIKELY(klass->IsProxyClass())
        ? SetupInterfaceLookupTable(self, klass, &allocator_, ProxyInterfacesAccessor(interfaces))
        : SetupInterfaceLookupTable(
              self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass)));
    if (UNLIKELY(iftable == nullptr)) {
      self->AssertPendingException();
      return false;
    }

    // Copy the IMT from superclass if present and needed. Update with new methods later.
    Handle<mirror::Class> super_class = hs.NewHandle(klass->GetSuperClass());
    bool is_klass_abstract = klass->IsAbstract();
    bool is_super_abstract = super_class->IsAbstract();
    DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
    DCHECK_EQ(super_class->ShouldHaveImt(), !is_super_abstract);
    if (!is_klass_abstract && !is_super_abstract) {
      ImTable* super_imt = super_class->GetImt(kPointerSize);
      for (size_t i = 0; i < ImTable::kSize; ++i) {
        out_imt[i] = super_imt->Get(i, kPointerSize);
      }
    }

    // If there are no new virtual methods and no new interfaces, we can simply reuse
    // the vtable from superclass. We may need to make a copy if it's embedded.
    const size_t super_vtable_length = super_class->GetVTableLength();
    if (num_virtual_methods == 0 && iftable.Get() == super_class->GetIfTable()) {
      DCHECK_EQ(is_super_abstract, !super_class->ShouldHaveEmbeddedVTable());
      if (is_super_abstract) {
        DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
        ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTable();
        CHECK(super_vtable != nullptr) << super_class->PrettyClass();
        klass->SetVTable(super_vtable);
        // No IMT in the super class, we need to reconstruct it from the iftable.
        if (!is_klass_abstract && iftable->Count() != 0) {
          class_linker_->FillIMTFromIfTable(iftable.Get(),
                                            runtime_->GetImtUnimplementedMethod(),
                                            runtime_->GetImtConflictMethod(),
                                            klass.Get(),
                                            /*create_conflict_tables=*/false,
                                            /*ignore_copied_methods=*/false,
                                            out_new_conflict,
                                            out_imt);
        }
      } else {
        // The superclass has an embedded vtable; copy its entries into a fresh
        // pointer array for this class.
        ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, super_vtable_length);
        if (UNLIKELY(vtable == nullptr)) {
          self->AssertPendingOOMException();
          return false;
        }
        for (size_t i = 0; i < super_vtable_length; i++) {
          vtable->SetElementPtrSize(
              i, super_class->GetEmbeddedVTableEntry(i, kPointerSize), kPointerSize);
        }
        klass->SetVTable(vtable);
        // The IMT was already copied from superclass if `klass` is not abstract.
      }
      klass->SetIfTable(iftable.Get());
      return true;
    }

    // Allocate method arrays, so that we can link interface methods without thread suspension,
    // otherwise GC could miss visiting newly allocated copied methods.
    // TODO: Do not allocate copied methods during linking, store only records about what
    // we need to allocate and allocate it at the end. Start with superclass iftable and
    // perform copy-on-write when needed to facilitate maximum memory sharing.
    if (!AllocateIfTableMethodArrays(self, klass, iftable)) {
      self->AssertPendingOOMException();
      return false;
    }

    // Compute the final vtable size and assign each declared (and to-be-copied)
    // method its vtable index. A result of 0 signals a pending linkage error.
    size_t final_vtable_size = AssignVTableIndexes(
        klass.Get(), super_class.Get(), is_super_abstract, num_virtual_methods, iftable.Get());
    if (final_vtable_size == 0u) {
      self->AssertPendingException();
      return false;
    }
    DCHECK(IsUint<16>(final_vtable_size));

    // Allocate the new vtable.
    Handle<mirror::PointerArray> vtable = hs.NewHandle(AllocPointerArray(self, final_vtable_size));
    if (UNLIKELY(vtable == nullptr)) {
      self->AssertPendingOOMException();
      return false;
    }

    LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
    if (num_new_copied_methods_ != 0u) {
      ReallocMethods(klass.Get());
    }

    // Store new virtual methods in the new vtable.
    ArrayRef<uint32_t> same_signature_vtable_lists = same_signature_vtable_lists_;
    for (ArtMethod& virtual_method : klass->GetVirtualMethodsSliceUnchecked(kPointerSize)) {
      uint32_t vtable_index = virtual_method.GetMethodIndexDuringLinking();
      vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
      if (UNLIKELY(vtable_index < same_signature_vtable_lists.size())) {
        // We may override more than one method according to JLS, see b/211854716.
        // Follow the linked list of same-signature super vtable slots and store
        // this method in each of them.
        while (same_signature_vtable_lists[vtable_index] != dex::kDexNoIndex) {
          DCHECK_LT(same_signature_vtable_lists[vtable_index], vtable_index);
          vtable_index = same_signature_vtable_lists[vtable_index];
          vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
          if (kIsDebugBuild) {
            ArtMethod* current_method = super_class->GetVTableEntry(vtable_index, kPointerSize);
            DCHECK(klass->CanAccessMember(current_method->GetDeclaringClass(),
                                          current_method->GetAccessFlags()));
            DCHECK(!current_method->IsFinal());
          }
        }
      }
    }

    // For non-overridden vtable slots, copy a method from `super_class`.
    for (size_t j = 0; j != super_vtable_length; ++j) {
      if (vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j) == nullptr) {
        ArtMethod* super_method = super_class->GetVTableEntry(j, kPointerSize);
        vtable->SetElementPtrSize(j, super_method, kPointerSize);
      }
    }

    // Update the `iftable` (and IMT) with finalized virtual methods.
    if (!FinalizeIfTable(klass,
                         iftable,
                         vtable,
                         is_klass_abstract,
                         is_super_abstract,
                         out_new_conflict,
                         out_imt)) {
      self->AssertPendingOOMException();
      return false;
    }

    klass->SetVTable(vtable.Get());
    klass->SetIfTable(iftable.Get());
    if (kIsDebugBuild) {
      CheckVTable(self, klass, kPointerSize);
      ClobberOldMethods(old_methods, klass->GetMethodsPtr());
    }
    return true;
  } else {
    // Only `java.lang.Object` has no superclass and is not an interface.
    return LinkJavaLangObjectMethods(self, klass);
  }
}
9049 
9050 template <PointerSize kPointerSize>
LinkJavaLangObjectMethods(Thread * self,Handle<mirror::Class> klass)9051 bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkJavaLangObjectMethods(
9052     Thread* self,
9053     Handle<mirror::Class> klass) {
9054   DCHECK_EQ(klass.Get(), GetClassRoot<mirror::Object>(class_linker_));
9055   DCHECK_EQ(klass->NumVirtualMethods(), mirror::Object::kVTableLength);
9056   static_assert(IsUint<16>(mirror::Object::kVTableLength));
9057   ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, mirror::Object::kVTableLength);
9058   if (UNLIKELY(vtable == nullptr)) {
9059     self->AssertPendingOOMException();
9060     return false;
9061   }
9062   for (size_t i = 0; i < mirror::Object::kVTableLength; ++i) {
9063     ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
9064     vtable->SetElementPtrSize(i, virtual_method, kPointerSize);
9065     virtual_method->SetMethodIndex(i);
9066   }
9067   klass->SetVTable(vtable);
9068   InitializeObjectVirtualMethodHashes(
9069       klass.Get(),
9070       kPointerSize,
9071       ArrayRef<uint32_t>(class_linker_->object_virtual_method_hashes_));
9072   // The interface table is already allocated but there are no interface methods to link.
9073   DCHECK(klass->GetIfTable() != nullptr);
9074   DCHECK_EQ(klass->GetIfTableCount(), 0);
9075   return true;
9076 }
9077 
9078 // Populate the class vtable and itable. Compute return type indices.
LinkMethods(Thread * self,Handle<mirror::Class> klass,Handle<mirror::ObjectArray<mirror::Class>> interfaces,bool * out_new_conflict,ArtMethod ** out_imt)9079 bool ClassLinker::LinkMethods(Thread* self,
9080                               Handle<mirror::Class> klass,
9081                               Handle<mirror::ObjectArray<mirror::Class>> interfaces,
9082                               bool* out_new_conflict,
9083                               ArtMethod** out_imt) {
9084   self->AllowThreadSuspension();
9085   // Link virtual methods then interface methods.
9086   Runtime* const runtime = Runtime::Current();
9087   if (LIKELY(GetImagePointerSize() == kRuntimePointerSize)) {
9088     LinkMethodsHelper<kRuntimePointerSize> helper(this, klass, self, runtime);
9089     return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
9090   } else {
9091     constexpr PointerSize kOtherPointerSize =
9092         (kRuntimePointerSize == PointerSize::k64) ? PointerSize::k32 : PointerSize::k64;
9093     LinkMethodsHelper<kOtherPointerSize> helper(this, klass, self, runtime);
9094     return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
9095   }
9096 }
9097 
// Helper for laying out the fields of a class during linking. Sorts fields into
// type-order buckets and assigns each an offset, reusing alignment gaps where
// possible. (The member definitions follow later in this file.)
class ClassLinker::LinkFieldsHelper {
 public:
  // Assign offsets to the static (`is_static` == true) or instance fields of
  // `klass`. `class_size` is an out-parameter — presumably the resulting class
  // size; confirm against the definition, which is outside this view.
  static bool LinkFields(ClassLinker* class_linker,
                         Thread* self,
                         Handle<mirror::Class> klass,
                         bool is_static,
                         size_t* class_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Ordering bucket for a field based on its type; see the enum definition below.
  enum class FieldTypeOrder : uint16_t;
  // Tracks alignment gaps that smaller fields can fill; see the class definition below.
  class FieldGaps;

  // Pairs a field's type-order bucket with its index in declaration order, so
  // fields can be sorted for layout and still mapped back to their `ArtField`.
  struct FieldTypeOrderAndIndex {
    FieldTypeOrder field_type_order;
    uint16_t field_index;
  };

  // Map the first character of a field's type descriptor to its layout bucket.
  static FieldTypeOrder FieldTypeOrderFromFirstDescriptorCharacter(char first_char);

  // Record `field_offset` for a field of `kSize` bytes; returns the next offset.
  template <size_t kSize>
  static MemberOffset AssignFieldOffset(ArtField* field, MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);
};
9122 
// We use the following order of field types for assigning offsets.
// Some fields can be shuffled forward to fill gaps, see
// `ClassLinker::LinkFieldsHelper::LinkFields()`.
// Note: the enumerator order is semantic — references first, then primitives
// grouped from widest (64-bit) to narrowest (8-bit) — so do not reorder.
enum class ClassLinker::LinkFieldsHelper::FieldTypeOrder : uint16_t {
  kReference = 0u,
  // 64-bit primitives.
  kLong,
  kDouble,
  // 32-bit primitives.
  kInt,
  kFloat,
  // 16-bit primitives.
  kChar,
  kShort,
  // 8-bit primitives.
  kBoolean,
  kByte,

  // Aliases for the last bucket of each size class (presumably used for
  // size-class comparisons during layout; the uses are outside this view).
  kLast64BitType = kDouble,
  kLast32BitType = kFloat,
  kLast16BitType = kShort,
};
9141 
9142 ALWAYS_INLINE
9143 ClassLinker::LinkFieldsHelper::FieldTypeOrder
FieldTypeOrderFromFirstDescriptorCharacter(char first_char)9144 ClassLinker::LinkFieldsHelper::FieldTypeOrderFromFirstDescriptorCharacter(char first_char) {
9145   switch (first_char) {
9146     case 'J':
9147       return FieldTypeOrder::kLong;
9148     case 'D':
9149       return FieldTypeOrder::kDouble;
9150     case 'I':
9151       return FieldTypeOrder::kInt;
9152     case 'F':
9153       return FieldTypeOrder::kFloat;
9154     case 'C':
9155       return FieldTypeOrder::kChar;
9156     case 'S':
9157       return FieldTypeOrder::kShort;
9158     case 'Z':
9159       return FieldTypeOrder::kBoolean;
9160     case 'B':
9161       return FieldTypeOrder::kByte;
9162     default:
9163       DCHECK(first_char == 'L' || first_char == '[') << first_char;
9164       return FieldTypeOrder::kReference;
9165   }
9166 }
9167 
// Gaps where we can insert fields in object layout.
//
// Alignment padding produced while laying out larger fields is recorded here
// so that later, smaller fields can be shuffled back into the holes. The
// DCHECKs below enforce that at most one gap of each size (1, 2 and 4 bytes)
// is recorded at any time; releasing part of a larger gap re-records the
// unused remainder as smaller gaps.
class ClassLinker::LinkFieldsHelper::FieldGaps {
 public:
  // Round `field_offset` up to `kSize` alignment, recording any skipped
  // bytes as gaps available for smaller fields.
  template <uint32_t kSize>
  ALWAYS_INLINE MemberOffset AlignFieldOffset(MemberOffset field_offset) {
    static_assert(kSize == 2u || kSize == 4u || kSize == 8u);
    if (!IsAligned<kSize>(field_offset.Uint32Value())) {
      uint32_t gap_start = field_offset.Uint32Value();
      field_offset = MemberOffset(RoundUp(gap_start, kSize));
      // The skipped range is at most `kSize - 1` bytes; that value also
      // serves as the bit mask of gap sizes that may need recording.
      AddGaps<kSize - 1u>(gap_start, field_offset.Uint32Value());
    }
    return field_offset;
  }

  // Whether a recorded gap can hold a `kSize`-byte field. A larger gap can
  // always serve a smaller field (the remainder is re-recorded on release).
  template <uint32_t kSize>
  bool HasGap() const {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    return (kSize == 1u && gap1_offset_ != kNoOffset) ||
           (kSize <= 2u && gap2_offset_ != kNoOffset) ||
           gap4_offset_ != kNoOffset;
  }

  // Consume a gap for a `kSize`-byte field and return its offset, preferring
  // the smallest (and therefore lowest, per the DCHECKs) suitable gap.
  // When a larger gap is used, its unused tail is re-recorded. Callers must
  // check `HasGap<kSize>()` first.
  template <uint32_t kSize>
  MemberOffset ReleaseGap() {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    uint32_t result;
    if (kSize == 1u && gap1_offset_ != kNoOffset) {
      DCHECK(gap2_offset_ == kNoOffset || gap2_offset_ > gap1_offset_);
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap1_offset_);
      result = gap1_offset_;
      gap1_offset_ = kNoOffset;
    } else if (kSize <= 2u && gap2_offset_ != kNoOffset) {
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap2_offset_);
      result = gap2_offset_;
      gap2_offset_ = kNoOffset;
      if (kSize < 2u) {
        // A 1-byte field placed in a 2-byte gap leaves a 1-byte gap behind.
        AddGaps<1u>(result + kSize, result + 2u);
      }
    } else {
      DCHECK_NE(gap4_offset_, kNoOffset);
      result = gap4_offset_;
      gap4_offset_ = kNoOffset;
      if (kSize < 4u) {
        // Re-record the remainder of the 4-byte gap; the mask `kSize | 2u`
        // covers the 2-byte piece and, for kSize == 1u, the 1-byte piece.
        AddGaps<kSize | 2u>(result + kSize, result + 4u);
      }
    }
    return MemberOffset(result);
  }

 private:
  // Record the byte range [gap_start, gap_end) as gaps, carving it into
  // naturally aligned 1-, 2- and 4-byte pieces from low to high offsets.
  // `kGapsToCheck` is a bit mask of the piece sizes that can occur, letting
  // the compiler drop the unreachable branches for each instantiation.
  template <uint32_t kGapsToCheck>
  void AddGaps(uint32_t gap_start, uint32_t gap_end) {
    if ((kGapsToCheck & 1u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_end, 2u);
      if ((gap_start & 1u) != 0u) {
        DCHECK_EQ(gap1_offset_, kNoOffset);
        gap1_offset_ = gap_start;
        gap_start += 1u;
        if (kGapsToCheck == 1u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 2u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 2u);
      DCHECK_ALIGNED(gap_end, 4u);
      if ((gap_start & 2u) != 0u) {
        DCHECK_EQ(gap2_offset_, kNoOffset);
        gap2_offset_ = gap_start;
        gap_start += 2u;
        if (kGapsToCheck <= 3u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 4u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 4u);
      DCHECK_ALIGNED(gap_end, 8u);
      DCHECK_EQ(gap_start + 4u, gap_end);
      DCHECK_EQ(gap4_offset_, kNoOffset);
      gap4_offset_ = gap_start;
      return;
    }

    // Unreachable if the mask covers every piece the range can contain.
    DCHECK(false) << "Remaining gap: " << gap_start << " to " << gap_end
        << " after checking " << kGapsToCheck;
  }

  // Sentinel meaning "no gap of this size is currently recorded".
  static constexpr uint32_t kNoOffset = static_cast<uint32_t>(-1);

  // Start offsets of the currently recorded 4-, 2- and 1-byte gaps.
  uint32_t gap4_offset_ = kNoOffset;
  uint32_t gap2_offset_ = kNoOffset;
  uint32_t gap1_offset_ = kNoOffset;
};
9269 
9270 template <size_t kSize>
9271 ALWAYS_INLINE
AssignFieldOffset(ArtField * field,MemberOffset field_offset)9272 MemberOffset ClassLinker::LinkFieldsHelper::AssignFieldOffset(ArtField* field,
9273                                                               MemberOffset field_offset) {
9274   DCHECK_ALIGNED(field_offset.Uint32Value(), kSize);
9275   DCHECK_EQ(Primitive::ComponentSize(field->GetTypeAsPrimitiveType()), kSize);
9276   field->SetOffset(field_offset);
9277   return MemberOffset(field_offset.Uint32Value() + kSize);
9278 }
9279 
// Assign offsets to the static or instance fields of `klass`.
//
// Fields are ordered so that references come first, then primitives by
// decreasing size (ties broken by type, then dex field index), and smaller
// fields are shuffled back into alignment gaps where possible. Also updates
// the class' reference-field count and, for instance fields, the object
// size; for static fields the computed size is returned via `class_size`.
// The only return statement is `return true;` — the function currently
// always succeeds.
bool ClassLinker::LinkFieldsHelper::LinkFields(ClassLinker* class_linker,
                                               Thread* self,
                                               Handle<mirror::Class> klass,
                                               bool is_static,
                                               size_t* class_size) {
  self->AllowThreadSuspension();
  const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
  LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
      klass->GetIFieldsPtr();

  // Initialize field_offset: statics start after the class' reference
  // statics slot, instance fields start right after the superclass' fields.
  MemberOffset field_offset(0);
  if (is_static) {
    field_offset = klass->GetFirstReferenceStaticFieldOffsetDuringLinking(
        class_linker->GetImagePointerSize());
  } else {
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (super_class != nullptr) {
      CHECK(super_class->IsResolved())
          << klass->PrettyClass() << " " << super_class->PrettyClass();
      field_offset = MemberOffset(super_class->GetObjectSize());
    }
  }

  CHECK_EQ(num_fields == 0, fields == nullptr) << klass->PrettyClass();

  // we want a relatively stable order so that adding new fields
  // minimizes disruption of C++ version such as Class and Method.
  //
  // The overall sort order is:
  // 1) All object reference fields, sorted alphabetically.
  // 2) All java long (64-bit) integer fields, sorted alphabetically.
  // 3) All java double (64-bit) floating point fields, sorted alphabetically.
  // 4) All java int (32-bit) integer fields, sorted alphabetically.
  // 5) All java float (32-bit) floating point fields, sorted alphabetically.
  // 6) All java char (16-bit) integer fields, sorted alphabetically.
  // 7) All java short (16-bit) integer fields, sorted alphabetically.
  // 8) All java boolean (8-bit) integer fields, sorted alphabetically.
  // 9) All java byte (8-bit) integer fields, sorted alphabetically.
  //
  // (References are first to increase the chance of reference visiting
  // being able to take a fast path using a bitmap of references at the
  // start of the object, see `Class::reference_instance_offsets_`.)
  //
  // Once the fields are sorted in this order we will attempt to fill any gaps
  // that might be present in the memory layout of the structure.
  // Note that we shall not fill gaps between the superclass fields.

  // Collect fields and their "type order index" (see numbered points above).
  // Suspension must be disallowed while we hold plain `ArtField*` pointers.
  const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
      "Using plain ArtField references");
  constexpr size_t kStackBufferEntries = 64;  // Avoid allocations for small number of fields.
  FieldTypeOrderAndIndex stack_buffer[kStackBufferEntries];
  std::vector<FieldTypeOrderAndIndex> heap_buffer;
  ArrayRef<FieldTypeOrderAndIndex> sorted_fields;
  if (num_fields <= kStackBufferEntries) {
    sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(stack_buffer, num_fields);
  } else {
    heap_buffer.resize(num_fields);
    sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(heap_buffer);
  }
  size_t num_reference_fields = 0;
  size_t primitive_fields_start = num_fields;
  DCHECK_LE(num_fields, 1u << 16);  // Indexes must fit in `uint16_t`.
  for (size_t i = 0; i != num_fields; ++i) {
    ArtField* field = &fields->At(i);
    const char* descriptor = field->GetTypeDescriptor();
    FieldTypeOrder field_type_order = FieldTypeOrderFromFirstDescriptorCharacter(descriptor[0]);
    uint16_t field_index = dchecked_integral_cast<uint16_t>(i);
    // Insert references to the start, other fields to the end.
    DCHECK_LT(num_reference_fields, primitive_fields_start);
    if (field_type_order == FieldTypeOrder::kReference) {
      sorted_fields[num_reference_fields] = { field_type_order, field_index };
      ++num_reference_fields;
    } else {
      --primitive_fields_start;
      sorted_fields[primitive_fields_start] = { field_type_order, field_index };
    }
  }
  DCHECK_EQ(num_reference_fields, primitive_fields_start);

  // Reference fields are already sorted by field index (and dex field index).
  DCHECK(std::is_sorted(
      sorted_fields.begin(),
      sorted_fields.begin() + num_reference_fields,
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        CHECK_EQ(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex(),
                 lhs.field_index < rhs.field_index);
        return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
      }));
  // Primitive fields were stored in reverse order of their field index (and dex field index).
  DCHECK(std::is_sorted(
      sorted_fields.begin() + primitive_fields_start,
      sorted_fields.end(),
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        CHECK_NE(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_NE(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(lhs_field->GetDexFieldIndex() > rhs_field->GetDexFieldIndex(),
                 lhs.field_index > rhs.field_index);
        return lhs.field_index > rhs.field_index;
      }));
  // Sort the primitive fields by the field type order, then field index.
  std::sort(sorted_fields.begin() + primitive_fields_start,
            sorted_fields.end(),
            [](const auto& lhs, const auto& rhs) {
              if (lhs.field_type_order != rhs.field_type_order) {
                return lhs.field_type_order < rhs.field_type_order;
              } else {
                return lhs.field_index < rhs.field_index;
              }
            });
  // Primitive fields are now sorted by field size (descending), then type, then field index.
  DCHECK(std::is_sorted(
      sorted_fields.begin() + primitive_fields_start,
      sorted_fields.end(),
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        Primitive::Type lhs_type = lhs_field->GetTypeAsPrimitiveType();
        CHECK_NE(lhs_type, Primitive::kPrimNot);
        Primitive::Type rhs_type = rhs_field->GetTypeAsPrimitiveType();
        CHECK_NE(rhs_type, Primitive::kPrimNot);
        if (lhs_type != rhs_type) {
          size_t lhs_size = Primitive::ComponentSize(lhs_type);
          size_t rhs_size = Primitive::ComponentSize(rhs_type);
          return (lhs_size != rhs_size) ? (lhs_size > rhs_size) : (lhs_type < rhs_type);
        } else {
          return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
        }
      }));

  // Process reference fields.
  FieldGaps field_gaps;
  size_t index = 0u;
  if (num_reference_fields != 0u) {
    constexpr size_t kReferenceSize = sizeof(mirror::HeapReference<mirror::Object>);
    field_offset = field_gaps.AlignFieldOffset<kReferenceSize>(field_offset);
    for (; index != num_reference_fields; ++index) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<kReferenceSize>(field, field_offset);
    }
  }
  // Process 64-bit fields.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
    field_offset = field_gaps.AlignFieldOffset<8u>(field_offset);
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<8u>(field, field_offset);
      ++index;
    }
  }
  // Process 32-bit fields, filling a gap left by 64-bit alignment first.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
    field_offset = field_gaps.AlignFieldOffset<4u>(field_offset);
    if (field_gaps.HasGap<4u>()) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      AssignFieldOffset<4u>(field, field_gaps.ReleaseGap<4u>());  // Ignore return value.
      ++index;
      DCHECK(!field_gaps.HasGap<4u>());  // There can be only one gap for a 32-bit field.
    }
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<4u>(field, field_offset);
      ++index;
    }
  }
  // Process 16-bit fields, filling recorded gaps before appending at the end.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
    field_offset = field_gaps.AlignFieldOffset<2u>(field_offset);
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType &&
           field_gaps.HasGap<2u>()) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      AssignFieldOffset<2u>(field, field_gaps.ReleaseGap<2u>());  // Ignore return value.
      ++index;
    }
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<2u>(field, field_offset);
      ++index;
    }
  }
  // Process 8-bit fields: first into remaining gaps, then appended at the end.
  for (; index != num_fields && field_gaps.HasGap<1u>(); ++index) {
    ArtField* field = &fields->At(sorted_fields[index].field_index);
    AssignFieldOffset<1u>(field, field_gaps.ReleaseGap<1u>());  // Ignore return value.
  }
  for (; index != num_fields; ++index) {
    ArtField* field = &fields->At(sorted_fields[index].field_index);
    field_offset = AssignFieldOffset<1u>(field, field_offset);
  }

  self->EndAssertNoThreadSuspension(old_no_suspend_cause);

  // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
  DCHECK_IMPLIES(class_linker->init_done_, !klass->DescriptorEquals("Ljava/lang/ref/Reference;"));
  if (!is_static &&
      UNLIKELY(!class_linker->init_done_) &&
      klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
    // We know there are no non-reference fields in the Reference classes, and we know
    // that 'referent' is alphabetically last, so this is easy...
    CHECK_EQ(num_reference_fields, num_fields) << klass->PrettyClass();
    CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
        << klass->PrettyClass();
    --num_reference_fields;
  }

  size_t size = field_offset.Uint32Value();
  // Update klass
  if (is_static) {
    klass->SetNumReferenceStaticFields(num_reference_fields);
    *class_size = size;
  } else {
    klass->SetNumReferenceInstanceFields(num_reference_fields);
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (num_reference_fields == 0 || super_class == nullptr) {
      // object has one reference field, klass, but we ignore it since we always visit the class.
      // super_class is null iff the class is java.lang.Object.
      if (super_class == nullptr ||
          (super_class->GetClassFlags() & mirror::kClassFlagNoReferenceFields) != 0) {
        klass->SetClassFlags(klass->GetClassFlags() | mirror::kClassFlagNoReferenceFields);
      }
    }
    if (kIsDebugBuild) {
      DCHECK_EQ(super_class == nullptr, klass->DescriptorEquals("Ljava/lang/Object;"));
      size_t total_reference_instance_fields = 0;
      ObjPtr<mirror::Class> cur_super = klass.Get();
      while (cur_super != nullptr) {
        total_reference_instance_fields += cur_super->NumReferenceInstanceFieldsDuringLinking();
        cur_super = cur_super->GetSuperClass();
      }
      if (super_class == nullptr) {
        CHECK_EQ(total_reference_instance_fields, 1u) << klass->PrettyDescriptor();
      } else {
        // Check that there is at least num_reference_fields other than Object.class.
        CHECK_GE(total_reference_instance_fields, 1u + num_reference_fields)
            << klass->PrettyClass();
      }
    }
    if (!klass->IsVariableSize()) {
      std::string temp;
      DCHECK_GE(size, sizeof(mirror::Object)) << klass->GetDescriptor(&temp);
      size_t previous_size = klass->GetObjectSize();
      if (previous_size != 0) {
        // Make sure that we didn't originally have an incorrect size.
        CHECK_EQ(previous_size, size) << klass->GetDescriptor(&temp);
      }
      klass->SetObjectSize(size);
    }
  }

  if (kIsDebugBuild) {
    // Make sure that the fields array is ordered by name but all reference
    // offsets are at the beginning as far as alignment allows.
    MemberOffset start_ref_offset = is_static
        ? klass->GetFirstReferenceStaticFieldOffsetDuringLinking(class_linker->image_pointer_size_)
        : klass->GetFirstReferenceInstanceFieldOffset();
    MemberOffset end_ref_offset(start_ref_offset.Uint32Value() +
                                num_reference_fields *
                                    sizeof(mirror::HeapReference<mirror::Object>));
    MemberOffset current_ref_offset = start_ref_offset;
    for (size_t i = 0; i < num_fields; i++) {
      ArtField* field = &fields->At(i);
      VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
          << " class=" << klass->PrettyClass() << " field=" << field->PrettyField()
          << " offset=" << field->GetOffsetDuringLinking();
      if (i != 0) {
        ArtField* const prev_field = &fields->At(i - 1);
        // NOTE: The field names can be the same. This is not possible in the Java language
        // but it's valid Java/dex bytecode and for example proguard can generate such bytecode.
        DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
      }
      Primitive::Type type = field->GetTypeAsPrimitiveType();
      bool is_primitive = type != Primitive::kPrimNot;
      if (klass->DescriptorEquals("Ljava/lang/ref/Reference;") &&
          strcmp("referent", field->GetName()) == 0) {
        is_primitive = true;  // We lied above, so we have to expect a lie here.
      }
      MemberOffset offset = field->GetOffsetDuringLinking();
      if (is_primitive) {
        if (offset.Uint32Value() < end_ref_offset.Uint32Value()) {
          // Shuffled before references.
          size_t type_size = Primitive::ComponentSize(type);
          CHECK_LT(type_size, sizeof(mirror::HeapReference<mirror::Object>));
          CHECK_LT(offset.Uint32Value(), start_ref_offset.Uint32Value());
          CHECK_LE(offset.Uint32Value() + type_size, start_ref_offset.Uint32Value());
          CHECK(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(offset.Uint32Value()));
        }
      } else {
        CHECK_EQ(current_ref_offset.Uint32Value(), offset.Uint32Value());
        current_ref_offset = MemberOffset(current_ref_offset.Uint32Value() +
                                          sizeof(mirror::HeapReference<mirror::Object>));
      }
    }
    CHECK_EQ(current_ref_offset.Uint32Value(), end_ref_offset.Uint32Value());
  }
  return true;
}
9590 
LinkInstanceFields(Thread * self,Handle<mirror::Class> klass)9591 bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
9592   CHECK(klass != nullptr);
9593   return LinkFieldsHelper::LinkFields(this, self, klass, false, nullptr);
9594 }
9595 
LinkStaticFields(Thread * self,Handle<mirror::Class> klass,size_t * class_size)9596 bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
9597   CHECK(klass != nullptr);
9598   return LinkFieldsHelper::LinkFields(this, self, klass, true, class_size);
9599 }
9600 
// Elements of the system @dalvik.annotation.Record annotation.
// The numeric values are used to index into `kRecordElementNames`.
enum class RecordElementType : uint8_t {
  kNames = 0,
  kTypes = 1,
  kSignatures = 2,
  kAnnotationVisibilities = 3,
  kAnnotations = 4
};
9608 
// Element names of the system @dalvik.annotation.Record annotation, indexed
// by the numeric value of the corresponding `RecordElementType` enumerator.
// `constexpr const char* const` makes both the array and the pointers
// immutable and usable at compile time (the original left the pointer
// elements mutable).
static constexpr const char* const kRecordElementNames[] = {"componentNames",
                                                            "componentTypes",
                                                            "componentSignatures",
                                                            "componentAnnotationVisibilities",
                                                            "componentAnnotations"};
// Keep the table in sync with the `RecordElementType` enumerator count.
static_assert(std::size(kRecordElementNames) == 5u);
9614 
9615 class RecordAnnotationVisitor final : public annotations::AnnotationVisitor {
9616  public:
RecordAnnotationVisitor()9617   RecordAnnotationVisitor() {}
9618 
ValidateCounts()9619   bool ValidateCounts() {
9620     if (has_error_) {
9621       return false;
9622     }
9623 
9624     // Verify the counts.
9625     bool annotation_element_exists =
9626         (signatures_count_ != UINT32_MAX) || (annotations_count_ != UINT32_MAX);
9627     if (count_ >= 2) {
9628       SetErrorMsg("Record class can't have more than one @Record Annotation");
9629     } else if (names_count_ == UINT32_MAX) {
9630       SetErrorMsg("componentNames element is required");
9631     } else if (types_count_ == UINT32_MAX) {
9632       SetErrorMsg("componentTypes element is required");
9633     } else if (names_count_ != types_count_) {  // Every component must have a name and a type.
9634       SetErrorMsg(StringPrintf(
9635           "componentTypes is expected to have %i, but has %i types", names_count_, types_count_));
9636       // The other 3 elements are optional, but is expected to have the same count if it exists.
9637     } else if (signatures_count_ != UINT32_MAX && signatures_count_ != names_count_) {
9638       SetErrorMsg(StringPrintf("componentSignatures size is %i, but is expected to be %i",
9639                                signatures_count_,
9640                                names_count_));
9641     } else if (annotation_element_exists && visibilities_count_ != names_count_) {
9642       SetErrorMsg(
9643           StringPrintf("componentAnnotationVisibilities size is %i, but is expected to be %i",
9644                        visibilities_count_,
9645                        names_count_));
9646     } else if (annotation_element_exists && annotations_count_ != names_count_) {
9647       SetErrorMsg(StringPrintf("componentAnnotations size is %i, but is expected to be %i",
9648                                annotations_count_,
9649                                names_count_));
9650     }
9651 
9652     return !has_error_;
9653   }
9654 
IsRecordAnnotationFound()9655   bool IsRecordAnnotationFound() { return count_ != 0; }
9656 
VisitAnnotation(const char * descriptor,uint8_t visibility)9657   annotations::VisitorStatus VisitAnnotation(const char* descriptor, uint8_t visibility) override {
9658     if (has_error_) {
9659       return annotations::VisitorStatus::kVisitBreak;
9660     }
9661 
9662     if (visibility != DexFile::kDexVisibilitySystem) {
9663       return annotations::VisitorStatus::kVisitNext;
9664     }
9665 
9666     if (strcmp(descriptor, "Ldalvik/annotation/Record;") != 0) {
9667       return annotations::VisitorStatus::kVisitNext;
9668     }
9669 
9670     count_ += 1;
9671     if (count_ >= 2) {
9672       return annotations::VisitorStatus::kVisitBreak;
9673     }
9674     return annotations::VisitorStatus::kVisitInner;
9675   }
9676 
VisitAnnotationElement(const char * element_name,uint8_t type,const JValue & value)9677   annotations::VisitorStatus VisitAnnotationElement(const char* element_name,
9678                                                     uint8_t type,
9679                                                     [[maybe_unused]] const JValue& value) override {
9680     if (has_error_) {
9681       return annotations::VisitorStatus::kVisitBreak;
9682     }
9683 
9684     RecordElementType visiting_type;
9685     uint32_t* element_count;
9686     if (strcmp(element_name, "componentNames") == 0) {
9687       visiting_type = RecordElementType::kNames;
9688       element_count = &names_count_;
9689     } else if (strcmp(element_name, "componentTypes") == 0) {
9690       visiting_type = RecordElementType::kTypes;
9691       element_count = &types_count_;
9692     } else if (strcmp(element_name, "componentSignatures") == 0) {
9693       visiting_type = RecordElementType::kSignatures;
9694       element_count = &signatures_count_;
9695     } else if (strcmp(element_name, "componentAnnotationVisibilities") == 0) {
9696       visiting_type = RecordElementType::kAnnotationVisibilities;
9697       element_count = &visibilities_count_;
9698     } else if (strcmp(element_name, "componentAnnotations") == 0) {
9699       visiting_type = RecordElementType::kAnnotations;
9700       element_count = &annotations_count_;
9701     } else {
9702       // ignore this element that could be introduced in the future ART.
9703       return annotations::VisitorStatus::kVisitNext;
9704     }
9705 
9706     if ((*element_count) != UINT32_MAX) {
9707       SetErrorMsg(StringPrintf("Two %s annotation elements are found but only one is expected",
9708                                kRecordElementNames[static_cast<uint8_t>(visiting_type)]));
9709       return annotations::VisitorStatus::kVisitBreak;
9710     }
9711 
9712     if (type != DexFile::kDexAnnotationArray) {
9713       SetErrorMsg(StringPrintf("%s must be array type", element_name));
9714       return annotations::VisitorStatus::kVisitBreak;
9715     }
9716 
9717     *element_count = 0;
9718     visiting_type_ = visiting_type;
9719     return annotations::VisitorStatus::kVisitInner;
9720   }
9721 
VisitArrayElement(uint8_t depth,uint32_t index,uint8_t type,const JValue & value)9722   annotations::VisitorStatus VisitArrayElement(uint8_t depth,
9723                                                uint32_t index,
9724                                                uint8_t type,
9725                                                [[maybe_unused]] const JValue& value) override {
9726     if (has_error_) {
9727       return annotations::VisitorStatus::kVisitBreak;
9728     }
9729     switch (visiting_type_) {
9730       case RecordElementType::kNames: {
9731         if (depth == 0) {
9732           if (!ExpectedTypeOrError(
9733                   type, DexFile::kDexAnnotationString, visiting_type_, index, depth)) {
9734             return annotations::VisitorStatus::kVisitBreak;
9735           }
9736           names_count_++;
9737           return annotations::VisitorStatus::kVisitNext;
9738         }
9739         break;
9740       }
9741       case RecordElementType::kTypes: {
9742         if (depth == 0) {
9743           if (!ExpectedTypeOrError(
9744                   type, DexFile::kDexAnnotationType, visiting_type_, index, depth)) {
9745             return annotations::VisitorStatus::kVisitBreak;
9746           }
9747           types_count_++;
9748           return annotations::VisitorStatus::kVisitNext;
9749         }
9750         break;
9751       }
9752       case RecordElementType::kSignatures: {
9753         if (depth == 0) {
9754           // kDexAnnotationNull implies no generic signature for the component.
9755           if (type != DexFile::kDexAnnotationNull &&
9756               !ExpectedTypeOrError(
9757                   type, DexFile::kDexAnnotationAnnotation, visiting_type_, index, depth)) {
9758             return annotations::VisitorStatus::kVisitBreak;
9759           }
9760           signatures_count_++;
9761           return annotations::VisitorStatus::kVisitNext;
9762         }
9763         break;
9764       }
9765       case RecordElementType::kAnnotationVisibilities: {
9766         if (depth == 0) {
9767           if (!ExpectedTypeOrError(
9768                   type, DexFile::kDexAnnotationArray, visiting_type_, index, depth)) {
9769             return annotations::VisitorStatus::kVisitBreak;
9770           }
9771           visibilities_count_++;
9772           return annotations::VisitorStatus::kVisitInner;
9773         } else if (depth == 1) {
9774           if (!ExpectedTypeOrError(
9775                   type, DexFile::kDexAnnotationByte, visiting_type_, index, depth)) {
9776             return annotations::VisitorStatus::kVisitBreak;
9777           }
9778           return annotations::VisitorStatus::kVisitNext;
9779         }
9780         break;
9781       }
9782       case RecordElementType::kAnnotations: {
9783         if (depth == 0) {
9784           if (!ExpectedTypeOrError(
9785                   type, DexFile::kDexAnnotationArray, visiting_type_, index, depth)) {
9786             return annotations::VisitorStatus::kVisitBreak;
9787           }
9788           annotations_count_++;
9789           return annotations::VisitorStatus::kVisitInner;
9790         } else if (depth == 1) {
9791           if (!ExpectedTypeOrError(
9792                   type, DexFile::kDexAnnotationAnnotation, visiting_type_, index, depth)) {
9793             return annotations::VisitorStatus::kVisitBreak;
9794           }
9795           return annotations::VisitorStatus::kVisitNext;
9796         }
9797         break;
9798       }
9799     }
9800 
9801     // Should never happen if every next depth level is handled above whenever kVisitInner is
9802     // returned.
9803     DCHECK(false) << StringPrintf("Unexpected depth %i for element %s",
9804                                   depth,
9805                                   kRecordElementNames[static_cast<uint8_t>(visiting_type_)]);
9806     return annotations::VisitorStatus::kVisitBreak;
9807   }
9808 
9809  private:
9810   uint32_t count_ = 0;
9811   uint32_t names_count_ = UINT32_MAX;
9812   uint32_t types_count_ = UINT32_MAX;
9813   uint32_t signatures_count_ = UINT32_MAX;
9814   uint32_t visibilities_count_ = UINT32_MAX;
9815   uint32_t annotations_count_ = UINT32_MAX;
9816   RecordElementType visiting_type_;
9817 
ExpectedTypeOrError(uint8_t type,uint8_t expected,RecordElementType visiting_type,uint8_t depth,uint32_t index)9818   inline bool ExpectedTypeOrError(uint8_t type,
9819                                   uint8_t expected,
9820                                   RecordElementType visiting_type,
9821                                   uint8_t depth,
9822                                   uint32_t index) {
9823     if (type == expected) {
9824       return true;
9825     }
9826 
9827     SetErrorMsg(StringPrintf(
9828         "Expect 0x%02x type but got 0x%02x at the index %i and depth %i for the element %s",
9829         expected,
9830         type,
9831         index,
9832         depth,
9833         kRecordElementNames[static_cast<uint8_t>(visiting_type)]));
9834     return false;
9835   }
9836 
9837   DISALLOW_COPY_AND_ASSIGN(RecordAnnotationVisitor);
9838 };
9839 
9840 /**
9841  * Set kClassFlagRecord and verify if klass is a record class.
9842  * If the verification fails, a pending java exception is thrown.
9843  *
9844  * @return false if verification fails. If klass isn't a record class,
9845  * it should always return true.
9846  */
VerifyRecordClass(Handle<mirror::Class> klass,ObjPtr<mirror::Class> super)9847 bool ClassLinker::VerifyRecordClass(Handle<mirror::Class> klass, ObjPtr<mirror::Class> super) {
9848   CHECK(klass != nullptr);
9849   // First, we check the conditions specified in java.lang.Class#isRecord().
9850   // If any of the conditions isn't fulfilled, it's not a record class and
9851   // ART should treat it as a normal class even if it's inherited from java.lang.Record.
9852   if (!klass->IsFinal()) {
9853     return true;
9854   }
9855 
9856   if (super == nullptr) {
9857     return true;
9858   }
9859 
9860   // Compare the string directly when this ClassLinker is initializing before
9861   // WellKnownClasses initializes
9862   if (WellKnownClasses::java_lang_Record == nullptr) {
9863     if (!super->DescriptorEquals("Ljava/lang/Record;")) {
9864       return true;
9865     }
9866   } else {
9867     ObjPtr<mirror::Class> java_lang_Record =
9868         WellKnownClasses::ToClass(WellKnownClasses::java_lang_Record);
9869     if (super.Ptr() != java_lang_Record.Ptr()) {
9870       return true;
9871     }
9872   }
9873 
9874   // Verify @dalvik.annotation.Record
9875   // The annotation has a mandatory element componentNames[] and componentTypes[] of the same size.
9876   // componentSignatures[], componentAnnotationVisibilities[][], componentAnnotations[][] are
9877   // optional, but should have the same size if it exists.
9878   RecordAnnotationVisitor visitor;
9879   annotations::VisitClassAnnotations(klass, &visitor);
9880   if (UNLIKELY(visitor.HasError())) {
9881     ThrowClassFormatError(klass.Get(), "%s", visitor.GetErrorMsg().c_str());
9882     return false;
9883   }
9884 
9885   if (!visitor.IsRecordAnnotationFound()) {
9886     return true;
9887   }
9888 
9889   if (!visitor.ValidateCounts()) {
9890     ThrowClassFormatError(klass.Get(), "%s", visitor.GetErrorMsg().c_str());
9891     return false;
9892   }
9893 
9894   // Set kClassFlagRecord.
9895   klass->SetRecordClass();
9896   return true;
9897 }
9898 
DoResolveString(dex::StringIndex string_idx,ObjPtr<mirror::DexCache> dex_cache)9899 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9900                                                     ObjPtr<mirror::DexCache> dex_cache) {
9901   StackHandleScope<1> hs(Thread::Current());
9902   Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
9903   return DoResolveString(string_idx, h_dex_cache);
9904 }
9905 
DoResolveString(dex::StringIndex string_idx,Handle<mirror::DexCache> dex_cache)9906 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9907                                                     Handle<mirror::DexCache> dex_cache) {
9908   const DexFile& dex_file = *dex_cache->GetDexFile();
9909   uint32_t utf16_length;
9910   const char* utf8_data = dex_file.GetStringDataAndUtf16Length(string_idx, &utf16_length);
9911   ObjPtr<mirror::String> string = intern_table_->InternStrong(utf16_length, utf8_data);
9912   if (string != nullptr) {
9913     dex_cache->SetResolvedString(string_idx, string);
9914   }
9915   return string;
9916 }
9917 
DoLookupString(dex::StringIndex string_idx,ObjPtr<mirror::DexCache> dex_cache)9918 ObjPtr<mirror::String> ClassLinker::DoLookupString(dex::StringIndex string_idx,
9919                                                    ObjPtr<mirror::DexCache> dex_cache) {
9920   DCHECK(dex_cache != nullptr);
9921   const DexFile& dex_file = *dex_cache->GetDexFile();
9922   uint32_t utf16_length;
9923   const char* utf8_data = dex_file.GetStringDataAndUtf16Length(string_idx, &utf16_length);
9924   ObjPtr<mirror::String> string =
9925       intern_table_->LookupStrong(Thread::Current(), utf16_length, utf8_data);
9926   if (string != nullptr) {
9927     dex_cache->SetResolvedString(string_idx, string);
9928   }
9929   return string;
9930 }
9931 
DoLookupResolvedType(dex::TypeIndex type_idx,ObjPtr<mirror::Class> referrer)9932 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
9933                                                         ObjPtr<mirror::Class> referrer) {
9934   return DoLookupResolvedType(type_idx, referrer->GetDexCache(), referrer->GetClassLoader());
9935 }
9936 
DoLookupResolvedType(dex::TypeIndex type_idx,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader)9937 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
9938                                                         ObjPtr<mirror::DexCache> dex_cache,
9939                                                         ObjPtr<mirror::ClassLoader> class_loader) {
9940   DCHECK(dex_cache->GetClassLoader() == class_loader);
9941   const DexFile& dex_file = *dex_cache->GetDexFile();
9942   std::string_view descriptor = dex_file.GetTypeDescriptorView(type_idx);
9943   ObjPtr<mirror::Class> type = LookupResolvedType(descriptor, class_loader);
9944   if (type != nullptr) {
9945     DCHECK(type->IsResolved());
9946     dex_cache->SetResolvedType(type_idx, type);
9947   }
9948   return type;
9949 }
9950 
LookupResolvedType(std::string_view descriptor,ObjPtr<mirror::ClassLoader> class_loader)9951 ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(std::string_view descriptor,
9952                                                       ObjPtr<mirror::ClassLoader> class_loader) {
9953   DCHECK(!descriptor.empty()) << "descriptor is empty string";
9954   ObjPtr<mirror::Class> type = nullptr;
9955   if (descriptor.length() == 1u) {
9956     // only the descriptors of primitive types should be 1 character long, also avoid class lookup
9957     // for primitive classes that aren't backed by dex files.
9958     type = LookupPrimitiveClass(descriptor[0]);
9959   } else {
9960     Thread* const self = Thread::Current();
9961     DCHECK(self != nullptr);
9962     const size_t hash = ComputeModifiedUtf8Hash(descriptor);
9963     // Find the class in the loaded classes table.
9964     type = LookupClass(self, descriptor, hash, class_loader);
9965   }
9966   return (type != nullptr && type->IsResolved()) ? type : nullptr;
9967 }
9968 
9969 template <typename RefType>
DoResolveType(dex::TypeIndex type_idx,RefType referrer)9970 ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx, RefType referrer) {
9971   StackHandleScope<2> hs(Thread::Current());
9972   Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
9973   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
9974   return DoResolveType(type_idx, dex_cache, class_loader);
9975 }
9976 
9977 // Instantiate the above.
9978 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9979                                                           ArtField* referrer);
9980 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9981                                                           ArtMethod* referrer);
9982 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9983                                                           ObjPtr<mirror::Class> referrer);
9984 
// Resolves the type `type_idx` from `dex_cache`'s dex file using
// `class_loader`, caching a successful result in the dex cache. On failure,
// null is returned with a pending exception on the current thread; a
// ClassNotFoundException is converted to a NoClassDefFoundError with the
// original exception attached as its cause.
ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                 Handle<mirror::DexCache> dex_cache,
                                                 Handle<mirror::ClassLoader> class_loader) {
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
  Thread* self = Thread::Current();
  const DexFile* dex_file = dex_cache->GetDexFile();
  ObjPtr<mirror::Class> resolved = FindClass(self, *dex_file, type_idx, class_loader);
  if (resolved != nullptr) {
    // TODO: we used to throw here if resolved's class loader was not the
    //       boot class loader. This was to permit different classes with the
    //       same name to be loaded simultaneously by different loaders
    dex_cache->SetResolvedType(type_idx, resolved);
  } else {
    CHECK(self->IsExceptionPending())
        << "Expected pending exception for failed resolution of: "
        << dex_file->GetTypeDescriptor(type_idx);
    // Convert a ClassNotFoundException to a NoClassDefFoundError.
    StackHandleScope<1> hs(self);
    Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
    if (cause->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException, this))) {
      DCHECK(resolved == nullptr);  // No Handle needed to preserve resolved.
      // Clear the old exception before throwing the new one, then chain them.
      self->ClearException();
      ThrowNoClassDefFoundError("Failed resolution of: %s", dex_file->GetTypeDescriptor(type_idx));
      self->GetException()->SetCause(cause.Get());
    }
  }
  DCHECK((resolved == nullptr) || resolved->IsResolved())
      << resolved->PrettyDescriptor() << " " << resolved->GetStatus();
  return resolved;
}
10015 
// Finds the method identified by `method_idx` in `klass`, enforcing hiddenapi
// restrictions. A successful result is also stored in `dex_cache`. Returns
// null if the method is not found or if access to it must be denied.
ArtMethod* ClassLinker::FindResolvedMethod(ObjPtr<mirror::Class> klass,
                                           ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader,
                                           uint32_t method_idx) {
  DCHECK(dex_cache->GetClassLoader() == class_loader);
  // Search for the method using dex_cache and method_idx. The Class::Find*Method()
  // functions can optimize the search if the dex_cache is the same as the DexCache
  // of the class, with fall-back to name and signature search otherwise.
  ArtMethod* resolved = nullptr;
  if (klass->IsInterface()) {
    resolved = klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
  } else {
    resolved = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
  }
  DCHECK(resolved == nullptr || resolved->GetDeclaringClassUnchecked() != nullptr);
  if (resolved != nullptr &&
      // We pass AccessMethod::kNone instead of kLinking to not warn yet on the
      // access, as we'll be looking if the method can be accessed through an
      // interface.
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kNone)) {
    // The resolved method that we have found cannot be accessed due to
    // hiddenapi (typically it is declared up the hierarchy and is not an SDK
    // method). Try to find an interface method from the implemented interfaces which is
    // part of the SDK.
    ArtMethod* itf_method = klass->FindAccessibleInterfaceMethod(resolved, image_pointer_size_);
    if (itf_method == nullptr) {
      // No interface method. Call ShouldDenyAccessToMember again but this time
      // with AccessMethod::kLinking to ensure that an appropriate warning is
      // logged.
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kLinking);
      resolved = nullptr;
    } else {
      // We found an interface method that is accessible, continue with the resolved method.
    }
  }
  if (resolved != nullptr) {
    // In case of jvmti, the dex file gets verified before being registered, so first
    // check if it's registered before checking class tables.
    const DexFile& dex_file = *dex_cache->GetDexFile();
    DCHECK_IMPLIES(
        IsDexFileRegistered(Thread::Current(), dex_file),
        FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
        << "DexFile referrer: " << dex_file.GetLocation()
        << " ClassLoader: " << DescribeLoaders(class_loader, "");
    // Be a good citizen and update the dex cache to speed subsequent calls.
    dex_cache->SetResolvedMethod(method_idx, resolved);
    // Disable the following invariant check as the verifier breaks it. b/73760543
    // const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
    // DCHECK(LookupResolvedType(method_id.class_idx_, dex_cache, class_loader) != nullptr)
    //    << "Method: " << resolved->PrettyMethod() << ", "
    //    << "Class: " << klass->PrettyClass() << " (" << klass->GetStatus() << "), "
    //    << "DexFile referrer: " << dex_file.GetLocation();
  }
  return resolved;
}
10075 
10076 // Returns true if `method` is either null or hidden.
10077 // Does not print any warnings if it is hidden.
CheckNoSuchMethod(ArtMethod * method,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader)10078 static bool CheckNoSuchMethod(ArtMethod* method,
10079                               ObjPtr<mirror::DexCache> dex_cache,
10080                               ObjPtr<mirror::ClassLoader> class_loader)
10081       REQUIRES_SHARED(Locks::mutator_lock_) {
10082   DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
10083   return method == nullptr ||
10084          hiddenapi::ShouldDenyAccessToMember(method,
10085                                              hiddenapi::AccessContext(class_loader, dex_cache),
10086                                              hiddenapi::AccessMethod::kNone);  // no warnings
10087 }
10088 
FindIncompatibleMethod(ObjPtr<mirror::Class> klass,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,uint32_t method_idx)10089 ArtMethod* ClassLinker::FindIncompatibleMethod(ObjPtr<mirror::Class> klass,
10090                                                ObjPtr<mirror::DexCache> dex_cache,
10091                                                ObjPtr<mirror::ClassLoader> class_loader,
10092                                                uint32_t method_idx) {
10093   DCHECK(dex_cache->GetClassLoader() == class_loader);
10094   if (klass->IsInterface()) {
10095     ArtMethod* method = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
10096     return CheckNoSuchMethod(method, dex_cache, class_loader) ? nullptr : method;
10097   } else {
10098     // If there was an interface method with the same signature, we would have
10099     // found it in the "copied" methods. Only DCHECK that the interface method
10100     // really does not exist.
10101     if (kIsDebugBuild) {
10102       ArtMethod* method =
10103           klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
10104       CHECK(CheckNoSuchMethod(method, dex_cache, class_loader) ||
10105             (klass->FindAccessibleInterfaceMethod(method, image_pointer_size_) == nullptr));
10106     }
10107     return nullptr;
10108   }
10109 }
10110 
ResolveMethodId(uint32_t method_idx,Handle<mirror::DexCache> dex_cache,Handle<mirror::ClassLoader> class_loader)10111 ArtMethod* ClassLinker::ResolveMethodId(uint32_t method_idx,
10112                                         Handle<mirror::DexCache> dex_cache,
10113                                         Handle<mirror::ClassLoader> class_loader) {
10114   DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
10115   ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx);
10116   Thread::PoisonObjectPointersIfDebug();
10117   if (resolved != nullptr) {
10118     DCHECK(!resolved->IsRuntimeMethod());
10119     DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
10120     return resolved;
10121   }
10122   // Fail, get the declaring class.
10123   const dex::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(method_idx);
10124   ObjPtr<mirror::Class> klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
10125   if (klass == nullptr) {
10126     Thread::Current()->AssertPendingException();
10127     return nullptr;
10128   }
10129   return FindResolvedMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
10130 }
10131 
LookupResolvedField(uint32_t field_idx,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,bool is_static)10132 ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
10133                                            ObjPtr<mirror::DexCache> dex_cache,
10134                                            ObjPtr<mirror::ClassLoader> class_loader,
10135                                            bool is_static) {
10136   DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
10137   const DexFile& dex_file = *dex_cache->GetDexFile();
10138   const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
10139   ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(field_id.class_idx_);
10140   if (klass == nullptr) {
10141     klass = LookupResolvedType(field_id.class_idx_, dex_cache, class_loader);
10142   }
10143   if (klass == nullptr) {
10144     // The class has not been resolved yet, so the field is also unresolved.
10145     return nullptr;
10146   }
10147   DCHECK(klass->IsResolved());
10148 
10149   return FindResolvedField(klass, dex_cache, class_loader, field_idx, is_static);
10150 }
10151 
ResolveFieldJLS(uint32_t field_idx,Handle<mirror::DexCache> dex_cache,Handle<mirror::ClassLoader> class_loader)10152 ArtField* ClassLinker::ResolveFieldJLS(uint32_t field_idx,
10153                                        Handle<mirror::DexCache> dex_cache,
10154                                        Handle<mirror::ClassLoader> class_loader) {
10155   DCHECK(dex_cache != nullptr);
10156   DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
10157   ArtField* resolved = dex_cache->GetResolvedField(field_idx);
10158   Thread::PoisonObjectPointersIfDebug();
10159   if (resolved != nullptr) {
10160     return resolved;
10161   }
10162   const DexFile& dex_file = *dex_cache->GetDexFile();
10163   const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
10164   ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
10165   if (klass == nullptr) {
10166     DCHECK(Thread::Current()->IsExceptionPending());
10167     return nullptr;
10168   }
10169 
10170   resolved = FindResolvedFieldJLS(klass, dex_cache.Get(), class_loader.Get(), field_idx);
10171   if (resolved == nullptr) {
10172     const char* name = dex_file.GetFieldName(field_id);
10173     const char* type = dex_file.GetFieldTypeDescriptor(field_id);
10174     ThrowNoSuchFieldError("", klass, type, name);
10175   }
10176   return resolved;
10177 }
10178 
FindResolvedField(ObjPtr<mirror::Class> klass,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,uint32_t field_idx,bool is_static)10179 ArtField* ClassLinker::FindResolvedField(ObjPtr<mirror::Class> klass,
10180                                          ObjPtr<mirror::DexCache> dex_cache,
10181                                          ObjPtr<mirror::ClassLoader> class_loader,
10182                                          uint32_t field_idx,
10183                                          bool is_static) {
10184   DCHECK(dex_cache->GetClassLoader() == class_loader);
10185   ArtField* resolved = is_static ? klass->FindStaticField(dex_cache, field_idx)
10186                                  : klass->FindInstanceField(dex_cache, field_idx);
10187   if (resolved != nullptr &&
10188       hiddenapi::ShouldDenyAccessToMember(resolved,
10189                                           hiddenapi::AccessContext(class_loader, dex_cache),
10190                                           hiddenapi::AccessMethod::kLinking)) {
10191     resolved = nullptr;
10192   }
10193 
10194   if (resolved != nullptr) {
10195     dex_cache->SetResolvedField(field_idx, resolved);
10196   }
10197 
10198   return resolved;
10199 }
10200 
FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader,uint32_t field_idx)10201 ArtField* ClassLinker::FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,
10202                                             ObjPtr<mirror::DexCache> dex_cache,
10203                                             ObjPtr<mirror::ClassLoader> class_loader,
10204                                             uint32_t field_idx) {
10205   DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
10206   ArtField* resolved = klass->FindField(dex_cache, field_idx);
10207 
10208   if (resolved != nullptr &&
10209       hiddenapi::ShouldDenyAccessToMember(resolved,
10210                                           hiddenapi::AccessContext(class_loader, dex_cache),
10211                                           hiddenapi::AccessMethod::kLinking)) {
10212     resolved = nullptr;
10213   }
10214 
10215   if (resolved != nullptr) {
10216     dex_cache->SetResolvedField(field_idx, resolved);
10217   }
10218 
10219   return resolved;
10220 }
10221 
// Resolves the MethodType for `proto_idx`, returning a cached instance when
// one exists. On success the new MethodType is published to the dex cache;
// on failure null is returned with a pending exception.
ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
    Thread* self,
    dex::ProtoIndex proto_idx,
    Handle<mirror::DexCache> dex_cache,
    Handle<mirror::ClassLoader> class_loader) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK(dex_cache != nullptr);
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());

  // Fast path: already resolved and cached.
  ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
  if (resolved != nullptr) {
    return resolved;
  }

  // Resolve the return and parameter classes into a raw, handle-backed form.
  VariableSizedHandleScope raw_method_type_hs(self);
  mirror::RawMethodType raw_method_type(&raw_method_type_hs);
  if (!ResolveMethodType(self, proto_idx, dex_cache, class_loader, raw_method_type)) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // The handle scope was filled with return type and parameter types.
  DCHECK_EQ(raw_method_type_hs.Size(),
            dex_cache->GetDexFile()->GetShortyView(proto_idx).length());
  ObjPtr<mirror::MethodType> method_type = mirror::MethodType::Create(self, raw_method_type);
  if (method_type != nullptr) {
    // Ensure all stores for the newly created MethodType are visible, before we attempt to place
    // it in the DexCache (b/224733324).
    std::atomic_thread_fence(std::memory_order_release);
    dex_cache->SetResolvedMethodType(proto_idx, method_type.Ptr());
  }
  return method_type;
}
10255 
ResolveMethodType(Thread * self,dex::ProtoIndex proto_idx,Handle<mirror::DexCache> dex_cache,Handle<mirror::ClassLoader> class_loader,mirror::RawMethodType method_type)10256 bool ClassLinker::ResolveMethodType(Thread* self,
10257                                     dex::ProtoIndex proto_idx,
10258                                     Handle<mirror::DexCache> dex_cache,
10259                                     Handle<mirror::ClassLoader> class_loader,
10260                                     /*out*/ mirror::RawMethodType method_type) {
10261   DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
10262   DCHECK(dex_cache != nullptr);
10263   DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
10264 
10265   // First resolve the return type.
10266   const DexFile& dex_file = *dex_cache->GetDexFile();
10267   const dex::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
10268   ObjPtr<mirror::Class> return_type =
10269       ResolveType(proto_id.return_type_idx_, dex_cache, class_loader);
10270   if (return_type == nullptr) {
10271     DCHECK(self->IsExceptionPending());
10272     return false;
10273   }
10274   method_type.SetRType(return_type);
10275 
10276   // Then resolve the argument types.
10277   DexFileParameterIterator it(dex_file, proto_id);
10278   for (; it.HasNext(); it.Next()) {
10279     const dex::TypeIndex type_idx = it.GetTypeIdx();
10280     ObjPtr<mirror::Class> param_type = ResolveType(type_idx, dex_cache, class_loader);
10281     if (param_type == nullptr) {
10282       DCHECK(self->IsExceptionPending());
10283       return false;
10284     }
10285     method_type.AddPType(param_type);
10286   }
10287 
10288   return true;
10289 }
10290 
ResolveMethodType(Thread * self,dex::ProtoIndex proto_idx,ArtMethod * referrer)10291 ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(Thread* self,
10292                                                           dex::ProtoIndex proto_idx,
10293                                                           ArtMethod* referrer) {
10294   StackHandleScope<2> hs(self);
10295   Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
10296   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
10297   return ResolveMethodType(self, proto_idx, dex_cache, class_loader);
10298 }
10299 
// Creates a MethodHandle for a field accessor (static/instance get/put)
// described by `method_handle`. Checks access from `referrer`, that the
// field's static-ness matches the handle type, and that puts do not target
// final fields. Returns null with a pending exception on any failure.
ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForField(
    Thread* self,
    const dex::MethodHandleItem& method_handle,
    ArtMethod* referrer) {
  DexFile::MethodHandleType handle_type =
      static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
  mirror::MethodHandle::Kind kind;
  bool is_put;
  bool is_static;
  int32_t num_params;
  // Map the dex-level handle type to the runtime kind and determine the shape
  // (parameter count) of the accessor's MethodType.
  switch (handle_type) {
    case DexFile::MethodHandleType::kStaticPut: {
      kind = mirror::MethodHandle::Kind::kStaticPut;
      is_put = true;
      is_static = true;
      num_params = 1;
      break;
    }
    case DexFile::MethodHandleType::kStaticGet: {
      kind = mirror::MethodHandle::Kind::kStaticGet;
      is_put = false;
      is_static = true;
      num_params = 0;
      break;
    }
    case DexFile::MethodHandleType::kInstancePut: {
      kind = mirror::MethodHandle::Kind::kInstancePut;
      is_put = true;
      is_static = false;
      num_params = 2;
      break;
    }
    case DexFile::MethodHandleType::kInstanceGet: {
      kind = mirror::MethodHandle::Kind::kInstanceGet;
      is_put = false;
      is_static = false;
      num_params = 1;
      break;
    }
    // Method-invoking handle types are handled by ResolveMethodHandleForMethod().
    case DexFile::MethodHandleType::kInvokeStatic:
    case DexFile::MethodHandleType::kInvokeInstance:
    case DexFile::MethodHandleType::kInvokeConstructor:
    case DexFile::MethodHandleType::kInvokeDirect:
    case DexFile::MethodHandleType::kInvokeInterface:
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
  }

  ArtField* target_field =
      ResolveField(method_handle.field_or_method_idx_, referrer, is_static);
  if (LIKELY(target_field != nullptr)) {
    ObjPtr<mirror::Class> target_class = target_field->GetDeclaringClass();
    ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccessMember(target_class, target_field->GetAccessFlags()))) {
      ThrowIllegalAccessErrorField(referring_class, target_field);
      return nullptr;
    }
    // TODO(b/364876321): ResolveField might return instance field when is_static is true and
    // vice versa.
    if (UNLIKELY(is_static != target_field->IsStatic())) {
      ThrowIncompatibleClassChangeErrorField(target_field, is_static, referrer);
      return nullptr;
    }
    // Writing to a final field through a handle is an illegal access.
    if (UNLIKELY(is_put && target_field->IsFinal())) {
      ThrowIllegalAccessErrorField(referring_class, target_field);
      return nullptr;
    }
  } else {
    DCHECK(Thread::Current()->IsExceptionPending());
    return nullptr;
  }

  StackHandleScope<4> hs(self);
  ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
  Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
  if (UNLIKELY(method_params == nullptr)) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::Class> constructor_class;
  Handle<mirror::Class> return_type;
  // Build the accessor's parameter list and return type:
  //   static put: (T)V, static get: ()T,
  //   instance put: (C, T)V, instance get: (C)T.
  switch (handle_type) {
    case DexFile::MethodHandleType::kStaticPut: {
      method_params->Set(0, target_field->ResolveType());
      return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
      break;
    }
    case DexFile::MethodHandleType::kStaticGet: {
      return_type = hs.NewHandle(target_field->ResolveType());
      break;
    }
    case DexFile::MethodHandleType::kInstancePut: {
      method_params->Set(0, target_field->GetDeclaringClass());
      method_params->Set(1, target_field->ResolveType());
      return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
      break;
    }
    case DexFile::MethodHandleType::kInstanceGet: {
      method_params->Set(0, target_field->GetDeclaringClass());
      return_type = hs.NewHandle(target_field->ResolveType());
      break;
    }
    case DexFile::MethodHandleType::kInvokeStatic:
    case DexFile::MethodHandleType::kInvokeInstance:
    case DexFile::MethodHandleType::kInvokeConstructor:
    case DexFile::MethodHandleType::kInvokeDirect:
    case DexFile::MethodHandleType::kInvokeInterface:
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
  }

  // ResolveType() above may have failed; a null entry means an exception is pending.
  for (int32_t i = 0; i < num_params; ++i) {
    if (UNLIKELY(method_params->Get(i) == nullptr)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  }

  if (UNLIKELY(return_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::MethodType>
      method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
  if (UNLIKELY(method_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  uintptr_t target = reinterpret_cast<uintptr_t>(target_field);
  return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
}
10435 
// Resolves a constant method handle whose target is a method (one of the kInvoke* dex
// method-handle types). Builds the handle's MethodType from the target's dex proto and
// returns a new MethodHandleImpl, or null with a pending exception on failure.
// Note: kInvokeConstructor handles are modelled as a transform and are special-cased
// at the end of this function.
ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForMethod(
    Thread* self,
    const dex::MethodHandleItem& method_handle,
    ArtMethod* referrer) {
  DexFile::MethodHandleType handle_type =
      static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
  mirror::MethodHandle::Kind kind;
  uint32_t receiver_count = 0;
  ArtMethod* target_method = nullptr;
  // Map the dex handle type to a runtime handle kind, resolve the target method with the
  // appropriate invoke-type checks, and record whether the handle takes a receiver.
  switch (handle_type) {
    case DexFile::MethodHandleType::kStaticPut:
    case DexFile::MethodHandleType::kStaticGet:
    case DexFile::MethodHandleType::kInstancePut:
    case DexFile::MethodHandleType::kInstanceGet:
      // Field-type handles are handled by ResolveMethodHandleForField, never here.
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    case DexFile::MethodHandleType::kInvokeStatic: {
      kind = mirror::MethodHandle::Kind::kInvokeStatic;
      receiver_count = 0;
      target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                              referrer,
                                              InvokeType::kStatic);
      break;
    }
    case DexFile::MethodHandleType::kInvokeInstance: {
      kind = mirror::MethodHandle::Kind::kInvokeVirtual;
      receiver_count = 1;
      target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                              referrer,
                                              InvokeType::kVirtual);
      break;
    }
    case DexFile::MethodHandleType::kInvokeConstructor: {
      // Constructors are currently implemented as a transform. They
      // are special cased later in this method.
      kind = mirror::MethodHandle::Kind::kInvokeTransform;
      receiver_count = 0;
      target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                              referrer,
                                              InvokeType::kDirect);
      break;
    }
    case DexFile::MethodHandleType::kInvokeDirect: {
      kind = mirror::MethodHandle::Kind::kInvokeDirect;
      receiver_count = 1;
      StackHandleScope<2> hs(self);
      // A constant method handle with type kInvokeDirect can refer to
      // a method that is private or to a method in a super class. To
      // disambiguate the two options, we resolve the method ignoring
      // the invocation type to determine if the method is private. We
      // then resolve again specifying the intended invocation type to
      // force the appropriate checks.
      target_method = ResolveMethodId(method_handle.field_or_method_idx_,
                                      hs.NewHandle(referrer->GetDexCache()),
                                      hs.NewHandle(referrer->GetClassLoader()));
      if (UNLIKELY(target_method == nullptr)) {
        break;
      }

      if (target_method->IsPrivate()) {
        kind = mirror::MethodHandle::Kind::kInvokeDirect;
        target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                                referrer,
                                                InvokeType::kDirect);
      } else {
        kind = mirror::MethodHandle::Kind::kInvokeSuper;
        target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                                referrer,
                                                InvokeType::kSuper);
        if (UNLIKELY(target_method == nullptr)) {
          break;
        }
        // Find the method specified in the parent in referring class
        // so invoke-super invokes the method in the parent of the
        // referrer.
        target_method =
            referrer->GetDeclaringClass()->FindVirtualMethodForVirtual(target_method,
                                                                       kRuntimePointerSize);
      }
      break;
    }
    case DexFile::MethodHandleType::kInvokeInterface: {
      kind = mirror::MethodHandle::Kind::kInvokeInterface;
      receiver_count = 1;
      target_method = ResolveMethodWithChecks(method_handle.field_or_method_idx_,
                                              referrer,
                                              InvokeType::kInterface);
      break;
    }
  }

  // Resolution failures above leave an exception pending; propagate as null.
  if (UNLIKELY(target_method == nullptr)) {
    DCHECK(Thread::Current()->IsExceptionPending());
    return nullptr;
  }

  // According to JVMS 4.4.8 none of invoke* MethodHandle-s can target <clinit> methods.
  if (UNLIKELY(target_method->IsClassInitializer())) {
    ThrowClassFormatError(referrer->GetDeclaringClass(),
        "Method handles can't target class initializer method");
    return nullptr;
  }

  // Access check from the referring class to the resolved target.
  ObjPtr<mirror::Class> target_class = target_method->GetDeclaringClass();
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  uint32_t access_flags = target_method->GetAccessFlags();
  if (UNLIKELY(!referring_class->CanAccessMember(target_class, access_flags))) {
    ThrowIllegalAccessErrorMethod(referring_class, target_method);
    return nullptr;
  }

  // Calculate the number of parameters from the method shorty. We add the
  // receiver count (0 or 1) and deduct one for the return value.
  uint32_t shorty_length;
  target_method->GetShorty(&shorty_length);
  int32_t num_params = static_cast<int32_t>(shorty_length + receiver_count - 1);

  StackHandleScope<5> hs(self);
  ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
  Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
  if (method_params.Get() == nullptr) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  const DexFile* dex_file = referrer->GetDexFile();
  const dex::MethodId& method_id = dex_file->GetMethodId(method_handle.field_or_method_idx_);
  int32_t index = 0;
  if (receiver_count != 0) {
    // Insert receiver. Use the class identified in the method handle rather than the declaring
    // class of the resolved method which may be super class or default interface method
    // (b/115964401).
    ObjPtr<mirror::Class> receiver_class = LookupResolvedType(method_id.class_idx_, referrer);
    // receiver_class should have been resolved when resolving the target method.
    DCHECK(receiver_class != nullptr);
    method_params->Set(index++, receiver_class);
  }

  // Resolve and append each declared parameter type from the method's proto.
  const dex::ProtoId& proto_id = dex_file->GetProtoId(method_id.proto_idx_);
  DexFileParameterIterator it(*dex_file, proto_id);
  while (it.HasNext()) {
    DCHECK_LT(index, num_params);
    const dex::TypeIndex type_idx = it.GetTypeIdx();
    ObjPtr<mirror::Class> klass = ResolveType(type_idx, referrer);
    if (nullptr == klass) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
    method_params->Set(index++, klass);
    it.Next();
  }

  Handle<mirror::Class> return_type =
      hs.NewHandle(ResolveType(proto_id.return_type_idx_, referrer));
  if (UNLIKELY(return_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::MethodType>
      method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
  if (UNLIKELY(method_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Constructor handles go through the MethodHandles.Lookup transform machinery
  // rather than a direct MethodHandleImpl.
  if (UNLIKELY(handle_type == DexFile::MethodHandleType::kInvokeConstructor)) {
    Handle<mirror::Class> constructor_class = hs.NewHandle(target_method->GetDeclaringClass());
    Handle<mirror::MethodHandlesLookup> lookup =
        hs.NewHandle(mirror::MethodHandlesLookup::GetDefault(self));
    return lookup->FindConstructor(self, constructor_class, method_type);
  }

  uintptr_t target = reinterpret_cast<uintptr_t>(target_method);
  return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
}
10613 
ResolveMethodHandle(Thread * self,uint32_t method_handle_idx,ArtMethod * referrer)10614 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandle(Thread* self,
10615                                                               uint32_t method_handle_idx,
10616                                                               ArtMethod* referrer)
10617     REQUIRES_SHARED(Locks::mutator_lock_) {
10618   const DexFile* const dex_file = referrer->GetDexFile();
10619   const dex::MethodHandleItem& method_handle = dex_file->GetMethodHandle(method_handle_idx);
10620   switch (static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_)) {
10621     case DexFile::MethodHandleType::kStaticPut:
10622     case DexFile::MethodHandleType::kStaticGet:
10623     case DexFile::MethodHandleType::kInstancePut:
10624     case DexFile::MethodHandleType::kInstanceGet:
10625       return ResolveMethodHandleForField(self, method_handle, referrer);
10626     case DexFile::MethodHandleType::kInvokeStatic:
10627     case DexFile::MethodHandleType::kInvokeInstance:
10628     case DexFile::MethodHandleType::kInvokeConstructor:
10629     case DexFile::MethodHandleType::kInvokeDirect:
10630     case DexFile::MethodHandleType::kInvokeInterface:
10631       return ResolveMethodHandleForMethod(self, method_handle, referrer);
10632   }
10633 }
10634 
IsQuickResolutionStub(const void * entry_point) const10635 bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
10636   return (entry_point == GetQuickResolutionStub()) ||
10637       (quick_resolution_trampoline_ == entry_point);
10638 }
10639 
IsQuickToInterpreterBridge(const void * entry_point) const10640 bool ClassLinker::IsQuickToInterpreterBridge(const void* entry_point) const {
10641   return (entry_point == GetQuickToInterpreterBridge()) ||
10642       (quick_to_interpreter_bridge_trampoline_ == entry_point);
10643 }
10644 
IsQuickGenericJniStub(const void * entry_point) const10645 bool ClassLinker::IsQuickGenericJniStub(const void* entry_point) const {
10646   return (entry_point == GetQuickGenericJniStub()) ||
10647       (quick_generic_jni_trampoline_ == entry_point);
10648 }
10649 
IsJniDlsymLookupStub(const void * entry_point) const10650 bool ClassLinker::IsJniDlsymLookupStub(const void* entry_point) const {
10651   return entry_point == GetJniDlsymLookupStub() ||
10652       (jni_dlsym_lookup_trampoline_ == entry_point);
10653 }
10654 
IsJniDlsymLookupCriticalStub(const void * entry_point) const10655 bool ClassLinker::IsJniDlsymLookupCriticalStub(const void* entry_point) const {
10656   return entry_point == GetJniDlsymLookupCriticalStub() ||
10657       (jni_dlsym_lookup_critical_trampoline_ == entry_point);
10658 }
10659 
// Returns the runtime's quick generic JNI stub entry point.
// Currently an alias for GetQuickGenericJniStub().
const void* ClassLinker::GetRuntimeQuickGenericJniStub() const {
  return GetQuickGenericJniStub();
}
10663 
SetEntryPointsForObsoleteMethod(ArtMethod * method) const10664 void ClassLinker::SetEntryPointsForObsoleteMethod(ArtMethod* method) const {
10665   DCHECK(method->IsObsolete());
10666   // We cannot mess with the entrypoints of native methods because they are used to determine how
10667   // large the method's quick stack frame is. Without this information we cannot walk the stacks.
10668   if (!method->IsNative()) {
10669     method->SetEntryPointFromQuickCompiledCode(GetInvokeObsoleteMethodStub());
10670   }
10671 }
10672 
// Dumps class-linker statistics for SIGQUIT (ANR) reports: zygote/post-zygote class
// counts, every registered class loader with its dex files and parent, and class
// initialization stats. Acquires classlinker_classes_lock_ then dex_lock_ (shared).
void ClassLinker::DumpForSigQuit(std::ostream& os) {
  ScopedObjectAccess soa(Thread::Current());
  ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
  os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
     << NumNonZygoteClasses() << "\n";
  ReaderMutexLock mu2(soa.Self(), *Locks::dex_lock_);
  os << "Dumping registered class loaders\n";
  size_t class_loader_index = 0;
  for (const ClassLoaderData& class_loader : class_loaders_) {
    // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
    ObjPtr<mirror::ClassLoader> loader =
        ObjPtr<mirror::ClassLoader>::DownCast(soa.Self()->DecodeJObject(class_loader.weak_root));
    if (loader != nullptr) {
      os << "#" << class_loader_index++ << " " << loader->GetClass()->PrettyDescriptor() << ": [";
      // List the locations of all dex files whose dex cache shares this loader's class table.
      bool saw_one_dex_file = false;
      for (const auto& entry : dex_caches_) {
        const DexCacheData& dex_cache = entry.second;
        if (dex_cache.class_table == class_loader.class_table) {
          if (saw_one_dex_file) {
            os << ":";
          }
          saw_one_dex_file = true;
          os << entry.first->GetLocation();
        }
      }
      os << "]";
      // Identify the parent loader by its index in class_loaders_, if registered.
      bool found_parent = false;
      if (loader->GetParent() != nullptr) {
        size_t parent_index = 0;
        for (const ClassLoaderData& class_loader2 : class_loaders_) {
          ObjPtr<mirror::ClassLoader> loader2 = ObjPtr<mirror::ClassLoader>::DownCast(
              soa.Self()->DecodeJObject(class_loader2.weak_root));
          if (loader2 == loader->GetParent()) {
            os << ", parent #" << parent_index;
            found_parent = true;
            break;
          }
          parent_index++;
        }
        if (!found_parent) {
          os << ", unregistered parent of type "
             << loader->GetParent()->GetClass()->PrettyDescriptor();
        }
      } else {
        os << ", no parent";
      }
      os << "\n";
    }
  }
  os << "Done dumping class loaders\n";
  Runtime* runtime = Runtime::Current();
  os << "Classes initialized: " << runtime->GetStat(KIND_GLOBAL_CLASS_INIT_COUNT) << " in "
     << PrettyDuration(runtime->GetStat(KIND_GLOBAL_CLASS_INIT_TIME)) << "\n";
}
10726 
10727 class CountClassesVisitor : public ClassLoaderVisitor {
10728  public:
CountClassesVisitor()10729   CountClassesVisitor() : num_zygote_classes(0), num_non_zygote_classes(0) {}
10730 
Visit(ObjPtr<mirror::ClassLoader> class_loader)10731   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
10732       REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
10733     ClassTable* const class_table = class_loader->GetClassTable();
10734     if (class_table != nullptr) {
10735       num_zygote_classes += class_table->NumZygoteClasses(class_loader);
10736       num_non_zygote_classes += class_table->NumNonZygoteClasses(class_loader);
10737     }
10738   }
10739 
10740   size_t num_zygote_classes;
10741   size_t num_non_zygote_classes;
10742 };
10743 
NumZygoteClasses() const10744 size_t ClassLinker::NumZygoteClasses() const {
10745   CountClassesVisitor visitor;
10746   VisitClassLoaders(&visitor);
10747   return visitor.num_zygote_classes + boot_class_table_->NumZygoteClasses(nullptr);
10748 }
10749 
NumNonZygoteClasses() const10750 size_t ClassLinker::NumNonZygoteClasses() const {
10751   CountClassesVisitor visitor;
10752   VisitClassLoaders(&visitor);
10753   return visitor.num_non_zygote_classes + boot_class_table_->NumNonZygoteClasses(nullptr);
10754 }
10755 
// Returns the number of classes loaded after the zygote fork.
size_t ClassLinker::NumLoadedClasses() {
  ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
  // Only return non-zygote classes since these are the ones apps care about.
  return NumNonZygoteClasses();
}
10761 
// Returns the tid of the thread holding classlinker_classes_lock_ exclusively, if any.
pid_t ClassLinker::GetClassesLockOwner() {
  return Locks::classlinker_classes_lock_->GetExclusiveOwnerTid();
}
10765 
// Returns the tid of the thread holding dex_lock_ exclusively, if any.
pid_t ClassLinker::GetDexLockOwner() {
  return Locks::dex_lock_->GetExclusiveOwnerTid();
}
10769 
SetClassRoot(ClassRoot class_root,ObjPtr<mirror::Class> klass)10770 void ClassLinker::SetClassRoot(ClassRoot class_root, ObjPtr<mirror::Class> klass) {
10771   DCHECK(!init_done_);
10772 
10773   DCHECK(klass != nullptr);
10774   DCHECK(klass->GetClassLoader() == nullptr);
10775 
10776   mirror::ObjectArray<mirror::Class>* class_roots = class_roots_.Read();
10777   DCHECK(class_roots != nullptr);
10778   DCHECK_LT(static_cast<uint32_t>(class_root), static_cast<uint32_t>(ClassRoot::kMax));
10779   int32_t index = static_cast<int32_t>(class_root);
10780   DCHECK(class_roots->Get(index) == nullptr);
10781   class_roots->Set<false>(index, klass);
10782 }
10783 
// Creates an instance of one of the well-known class loaders (PathClassLoader,
// DelegateLastClassLoader or InMemoryDexClassLoader) wrapping `dex_files`, by
// hand-building the dalvik.system.DexPathList object graph that the Java-side
// constructors would normally create. Used by gtests, oatdump and the compiler;
// the resulting objects deliberately skip Java-level initialization in places
// (see inline notes). Returns the new loader; allocation failures surface as
// null handles checked by DCHECKs (non-production paths only).
ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
    Thread* self,
    const std::vector<const DexFile*>& dex_files,
    Handle<mirror::Class> loader_class,
    Handle<mirror::ClassLoader> parent_loader,
    Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries,
    Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries_after) {
  // Only the three well-known loader types are supported.
  CHECK(loader_class.Get() == WellKnownClasses::dalvik_system_PathClassLoader ||
        loader_class.Get() == WellKnownClasses::dalvik_system_DelegateLastClassLoader ||
        loader_class.Get() == WellKnownClasses::dalvik_system_InMemoryDexClassLoader);

  StackHandleScope<5> hs(self);

  // DexPathList.dexElements is an Element[]; allocate one slot per dex file.
  ArtField* dex_elements_field = WellKnownClasses::dalvik_system_DexPathList_dexElements;

  Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
  DCHECK(dex_elements_class != nullptr);
  DCHECK(dex_elements_class->IsArrayClass());
  Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
      mirror::ObjectArray<mirror::Object>::Alloc(self,
                                                 dex_elements_class.Get(),
                                                 dex_files.size())));
  Handle<mirror::Class> h_dex_element_class =
      hs.NewHandle(dex_elements_class->GetComponentType());

  // Fields of DexPathList$Element and dalvik.system.DexFile we will fill in manually.
  ArtField* element_file_field = WellKnownClasses::dalvik_system_DexPathList__Element_dexFile;
  DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());

  ArtField* cookie_field = WellKnownClasses::dalvik_system_DexFile_cookie;
  DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());

  ArtField* file_name_field = WellKnownClasses::dalvik_system_DexFile_fileName;
  DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());

  // Fill the elements array.
  int32_t index = 0;
  for (const DexFile* dex_file : dex_files) {
    StackHandleScope<4> hs2(self);

    // CreateWellKnownClassLoader is only used by gtests and compiler.
    // Index 0 of h_long_array is supposed to be the oat file but we can leave it null.
    Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
        self,
        kDexFileIndexStart + 1));
    DCHECK(h_long_array != nullptr);
    // Store the native DexFile pointer as the cookie payload.
    h_long_array->Set(kDexFileIndexStart, reinterpret_cast64<int64_t>(dex_file));

    // Note that this creates a finalizable dalvik.system.DexFile object and a corresponding
    // FinalizerReference which will never get cleaned up without a started runtime.
    Handle<mirror::Object> h_dex_file = hs2.NewHandle(
        cookie_field->GetDeclaringClass()->AllocObject(self));
    DCHECK(h_dex_file != nullptr);
    cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());

    Handle<mirror::String> h_file_name = hs2.NewHandle(
        mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
    DCHECK(h_file_name != nullptr);
    file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());

    // Wrap the DexFile object in a DexPathList$Element and store it in the array.
    Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
    DCHECK(h_element != nullptr);
    element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());

    h_dex_elements->Set(index, h_element.Get());
    index++;
  }
  DCHECK_EQ(index, h_dex_elements->GetLength());

  // Create DexPathList.
  Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
      dex_elements_field->GetDeclaringClass()->AllocObject(self));
  DCHECK(h_dex_path_list != nullptr);
  // Set elements.
  dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
  // Create an empty List for the "nativeLibraryDirectories," required for native tests.
  // Note: this code is uncommon(oatdump)/testing-only, so don't add further WellKnownClasses
  //       elements.
  {
    ArtField* native_lib_dirs = dex_elements_field->GetDeclaringClass()->
        FindDeclaredInstanceField("nativeLibraryDirectories", "Ljava/util/List;");
    DCHECK(native_lib_dirs != nullptr);
    ObjPtr<mirror::Class> list_class = FindSystemClass(self, "Ljava/util/ArrayList;");
    DCHECK(list_class != nullptr);
    {
      StackHandleScope<1> h_list_scope(self);
      Handle<mirror::Class> h_list_class(h_list_scope.NewHandle<mirror::Class>(list_class));
      bool list_init = EnsureInitialized(self, h_list_class, true, true);
      DCHECK(list_init);
      // Re-read through the handle: initialization may have moved the class.
      list_class = h_list_class.Get();
    }
    ObjPtr<mirror::Object> list_object = list_class->AllocObject(self);
    // Note: we leave the object uninitialized. This must never leak into any non-testing code, but
    //       is fine for testing. While it violates a Java-code invariant (the elementData field is
    //       normally never null), as long as one does not try to add elements, this will still
    //       work.
    native_lib_dirs->SetObject<false>(h_dex_path_list.Get(), list_object);
  }

  // Create the class loader..
  Handle<mirror::ClassLoader> h_class_loader = hs.NewHandle<mirror::ClassLoader>(
      ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
  DCHECK(h_class_loader != nullptr);
  // Set DexPathList.
  ArtField* path_list_field = WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList;
  DCHECK(path_list_field != nullptr);
  path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());

  // Make a pretend boot-classpath.
  // TODO: Should we scan the image?
  ArtField* const parent_field = WellKnownClasses::java_lang_ClassLoader_parent;
  DCHECK(parent_field != nullptr);
  if (parent_loader.Get() == nullptr) {
    // No explicit parent: synthesize a BootClassLoader instance.
    ObjPtr<mirror::Object> boot_loader(
        WellKnownClasses::java_lang_BootClassLoader->AllocObject(self));
    parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
  } else {
    parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
  }

  // Install the shared-library loader arrays (searched before/after this loader).
  ArtField* shared_libraries_field =
      WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
  DCHECK(shared_libraries_field != nullptr);
  shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());

  ArtField* shared_libraries_after_field =
        WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
  DCHECK(shared_libraries_after_field != nullptr);
  shared_libraries_after_field->SetObject<false>(h_class_loader.Get(),
                                                 shared_libraries_after.Get());
  return h_class_loader.Get();
}
10915 
CreatePathClassLoader(Thread * self,const std::vector<const DexFile * > & dex_files)10916 jobject ClassLinker::CreatePathClassLoader(Thread* self,
10917                                            const std::vector<const DexFile*>& dex_files) {
10918   StackHandleScope<3u> hs(self);
10919   Handle<mirror::Class> d_s_pcl =
10920       hs.NewHandle(WellKnownClasses::dalvik_system_PathClassLoader.Get());
10921   auto null_parent = hs.NewHandle<mirror::ClassLoader>(nullptr);
10922   auto null_libs = hs.NewHandle<mirror::ObjectArray<mirror::ClassLoader>>(nullptr);
10923   ObjPtr<mirror::ClassLoader> class_loader =
10924       CreateWellKnownClassLoader(self, dex_files, d_s_pcl, null_parent, null_libs, null_libs);
10925   return Runtime::Current()->GetJavaVM()->AddGlobalRef(self, class_loader);
10926 }
10927 
DropFindArrayClassCache()10928 void ClassLinker::DropFindArrayClassCache() {
10929   for (size_t i = 0; i < kFindArrayCacheSize; i++) {
10930     find_array_class_cache_[i].store(GcRoot<mirror::Class>(nullptr), std::memory_order_relaxed);
10931   }
10932   find_array_class_cache_next_victim_ = 0;
10933 }
10934 
VisitClassLoaders(ClassLoaderVisitor * visitor) const10935 void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
10936   Thread* const self = Thread::Current();
10937   for (const ClassLoaderData& data : class_loaders_) {
10938     // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
10939     ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
10940         self->DecodeJObject(data.weak_root));
10941     if (class_loader != nullptr) {
10942       visitor->Visit(class_loader);
10943     }
10944   }
10945 }
10946 
VisitDexCaches(DexCacheVisitor * visitor) const10947 void ClassLinker::VisitDexCaches(DexCacheVisitor* visitor) const {
10948   Thread* const self = Thread::Current();
10949   for (const auto& it : dex_caches_) {
10950     // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
10951     ObjPtr<mirror::DexCache> dex_cache = ObjPtr<mirror::DexCache>::DownCast(
10952         self->DecodeJObject(it.second.weak_root));
10953     if (dex_cache != nullptr) {
10954       visitor->Visit(dex_cache);
10955     }
10956   }
10957 }
10958 
VisitAllocators(AllocatorVisitor * visitor) const10959 void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
10960   for (const ClassLoaderData& data : class_loaders_) {
10961     LinearAlloc* alloc = data.allocator;
10962     if (alloc != nullptr && !visitor->Visit(alloc)) {
10963         break;
10964     }
10965   }
10966 }
10967 
InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,ObjPtr<mirror::ClassLoader> class_loader)10968 void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
10969                                                ObjPtr<mirror::ClassLoader> class_loader) {
10970   DCHECK(dex_file != nullptr);
10971   Thread* const self = Thread::Current();
10972   WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
10973   ClassTable* const table = ClassTableForClassLoader(class_loader);
10974   DCHECK(table != nullptr);
10975   if (table->InsertStrongRoot(dex_file)) {
10976     WriteBarrierOnClassLoaderLocked(class_loader, dex_file);
10977   } else {
10978     // Write-barrier not required if strong-root isn't inserted.
10979   }
10980 }
10981 
// Reclaims resources of class loaders whose Java objects have been garbage
// collected (their JNI weak globals read as null): unlinks them from
// class_loaders_, removes their dex caches, runs CHA cleanup, frees their
// allocators/class tables and unregisters any now-dead executable oat code
// ranges from the fault handler.
void ClassLinker::CleanupClassLoaders() {
  Thread* const self = Thread::Current();
  std::list<ClassLoaderData> to_delete;
  // Do the delete outside the lock to avoid lock violation in jit code cache.
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
      // Advance before a potential splice invalidates this_it's position in the list.
      auto this_it = it;
      ++it;
      const ClassLoaderData& data = *this_it;
      // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
      ObjPtr<mirror::ClassLoader> class_loader =
          ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
      if (class_loader == nullptr) {
        VLOG(class_linker) << "Freeing class loader";
        to_delete.splice(to_delete.end(), class_loaders_, this_it);
      }
    }
  }
  if (to_delete.empty()) {
    return;
  }
  std::set<const OatFile*> unregistered_oat_files;
  JavaVMExt* vm = self->GetJniEnv()->GetVm();
  {
    // Drop dex caches whose weak roots have been cleared; they must all belong
    // to one of the class loaders collected above.
    WriterMutexLock mu(self, *Locks::dex_lock_);
    for (auto it = dex_caches_.begin(), end = dex_caches_.end(); it != end; ) {
      const DexFile* dex_file = it->first;
      const DexCacheData& data = it->second;
      if (self->DecodeJObject(data.weak_root) == nullptr) {
        DCHECK(to_delete.end() != std::find_if(
            to_delete.begin(),
            to_delete.end(),
            [&](const ClassLoaderData& cld) { return cld.class_table == data.class_table; }));
        // Remember executable oat files so their code ranges can be unregistered below.
        if (dex_file->GetOatDexFile() != nullptr &&
            dex_file->GetOatDexFile()->GetOatFile() != nullptr &&
            dex_file->GetOatDexFile()->GetOatFile()->IsExecutable()) {
          unregistered_oat_files.insert(dex_file->GetOatDexFile()->GetOatFile());
        }
        vm->DeleteWeakGlobalRef(self, data.weak_root);
        it = dex_caches_.erase(it);
      } else {
        ++it;
      }
    }
  }
  {
    ScopedDebugDisallowReadBarriers sddrb(self);
    for (ClassLoaderData& data : to_delete) {
      // CHA unloading analysis and SingleImplementation cleanups are required.
      PrepareToDeleteClassLoader(self, data, /*cleanup_cha=*/true);
    }
  }
  // Free the native structures only after CHA cleanup is done for all of them.
  for (const ClassLoaderData& data : to_delete) {
    delete data.allocator;
    delete data.class_table;
  }
  Runtime* runtime = Runtime::Current();
  if (!unregistered_oat_files.empty()) {
    for (const OatFile* oat_file : unregistered_oat_files) {
      // Notify the fault handler about removal of the executable code range if needed.
      DCHECK(oat_file->IsExecutable());
      size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
      DCHECK_LE(exec_offset, oat_file->Size());
      size_t exec_size = oat_file->Size() - exec_offset;
      if (exec_size != 0u) {
        runtime->RemoveGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
      }
    }
  }

  if (runtime->GetStartupLinearAlloc() != nullptr) {
    // Because the startup linear alloc can contain dex cache arrays associated
    // to class loaders that got unloaded, we need to delete these
    // arrays.
    StartupCompletedTask::DeleteStartupDexCaches(self, /* called_by_gc= */ true);
    DCHECK_EQ(runtime->GetStartupLinearAlloc(), nullptr);
  }
}
11061 
// Visitor that linearly scans every visited class looking for the one whose
// virtual-methods slice contains a given method. Used to locate the declaring
// holder of a *copied* `ArtMethod`, which does not point back to its holder.
class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
 public:
  FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
      : method_(method),
        pointer_size_(pointer_size) {}

  bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) override {
    if (klass->GetVirtualMethodsSliceUnchecked(pointer_size_).Contains(method_)) {
      holder_ = klass;
    }
    // Return false to stop searching if holder_ is not null.
    return holder_ == nullptr;
  }

  // Result slot: the class containing `method_`, or null if not (yet) found.
  ObjPtr<mirror::Class> holder_ = nullptr;
  const ArtMethod* const method_;
  const PointerSize pointer_size_;
};
11080 
GetHoldingClassOfCopiedMethod(ArtMethod * method)11081 ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* method) {
11082   ScopedTrace trace(__FUNCTION__);  // Since this function is slow, have a trace to notify people.
11083   CHECK(method->IsCopied());
11084   FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
11085   VisitClasses(&visitor);
11086   DCHECK(visitor.holder_ != nullptr);
11087   return visitor.holder_;
11088 }
11089 
// Finds the class loader owning the memory that holds a copied `ArtMethod` by
// checking, in order: the boot class path, app image spaces, and finally the
// per-class-loader `LinearAlloc` areas.
ObjPtr<mirror::ClassLoader> ClassLinker::GetHoldingClassLoaderOfCopiedMethod(Thread* self,
                                                                             ArtMethod* method) {
  // Note: `GetHoldingClassOfCopiedMethod(method)` is a lot more expensive than finding
  // the class loader, so we're using it only to verify the result in debug mode.
  CHECK(method->IsCopied());
  gc::Heap* heap = Runtime::Current()->GetHeap();
  // Check if the copied method is in the boot class path.
  if (heap->IsBootImageAddress(method) || GetAllocatorForClassLoader(nullptr)->Contains(method)) {
    DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == nullptr);
    return nullptr;
  }
  // Check if the copied method is in an app image.
  // Note: Continuous spaces contain boot image spaces and app image spaces.
  // However, they are sorted by address, so boot images are not trivial to skip.
  ArrayRef<gc::space::ContinuousSpace* const> spaces(heap->GetContinuousSpaces());
  DCHECK_GE(spaces.size(), heap->GetBootImageSpaces().size());
  for (gc::space::ContinuousSpace* space : spaces) {
    if (space->IsImageSpace()) {
      gc::space::ImageSpace* image_space = space->AsImageSpace();
      // Unsigned arithmetic: if `method` is below `Begin()` the subtraction wraps
      // to a huge value and the single-comparison range check below fails.
      size_t offset = reinterpret_cast<const uint8_t*>(method) - image_space->Begin();
      const ImageSection& methods_section = image_space->GetImageHeader().GetMethodsSection();
      if (offset - methods_section.Offset() < methods_section.Size()) {
        // Grab the class loader from the first non-BCP class in the app image class table.
        // Note: If we allow classes from arbitrary parent or library class loaders in app
        // images, this shall need to be updated to actually search for the exact class.
        const ImageSection& class_table_section =
            image_space->GetImageHeader().GetClassTableSection();
        CHECK_NE(class_table_section.Size(), 0u);
        const uint8_t* ptr = image_space->Begin() + class_table_section.Offset();
        size_t read_count = 0;
        // View the in-image class table directly; no copy is made.
        ClassTable::ClassSet class_set(ptr, /*make_copy_of_data=*/ false, &read_count);
        CHECK(!class_set.empty());
        auto it = class_set.begin();
        // No read barrier needed for references to non-movable image classes.
        // The CHECK enforces that the table contains at least one non-BCP class.
        while ((*it).Read<kWithoutReadBarrier>()->IsBootStrapClassLoaded()) {
          ++it;
          CHECK(it != class_set.end());
        }
        ObjPtr<mirror::ClassLoader> class_loader =
            (*it).Read<kWithoutReadBarrier>()->GetClassLoader();
        DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == class_loader);
        return class_loader;
      }
    }
  }
  // Otherwise, the method must be in one of the `LinearAlloc` memory areas.
  jweak result = nullptr;
  {
    // Only hold the lock while scanning; decoding the weak root happens after.
    ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (const ClassLoaderData& data : class_loaders_) {
      if (data.allocator->Contains(method)) {
        result = data.weak_root;
        break;
      }
    }
  }
  CHECK(result != nullptr) << "Did not find allocator holding the copied method: " << method
      << " " << method->PrettyMethod();
  // The `method` is alive, so the class loader must also be alive.
  return ObjPtr<mirror::ClassLoader>::DownCast(
      Runtime::Current()->GetJavaVM()->DecodeWeakGlobalAsStrong(result));
}
11152 
// Public-SDK access checks are only implemented by the AOT subclass.
bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtMethod* art_method) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11159 
// Field variant of the public-SDK access check; AOT-only, see overload above.
bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtField* art_field) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11166 
// Type-descriptor variant of the public-SDK access check; AOT-only.
bool ClassLinker::DenyAccessBasedOnPublicSdk(
    [[maybe_unused]] std::string_view type_descriptor) const {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11173 
// Toggling public-SDK checks is only meaningful in the AOT subclass.
void ClassLinker::SetEnablePublicSdkChecks([[maybe_unused]] bool enabled) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11179 
// Transaction stub: transactions exist only in the AOT subclass.
bool ClassLinker::TransactionWriteConstraint(
    [[maybe_unused]] Thread* self, [[maybe_unused]] ObjPtr<mirror::Object> obj) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11186 
// Transaction stub: value-write constraints are checked only by the AOT subclass.
bool ClassLinker::TransactionWriteValueConstraint(
    [[maybe_unused]] Thread* self, [[maybe_unused]] ObjPtr<mirror::Object> value) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11193 
// Transaction stub: allocation constraints are checked only by the AOT subclass.
bool ClassLinker::TransactionAllocationConstraint(
    [[maybe_unused]] Thread* self, [[maybe_unused]] ObjPtr<mirror::Class> klass) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11200 
// Transaction write-record stub; real recording happens in the AOT subclass.
void ClassLinker::RecordWriteFieldBoolean([[maybe_unused]] mirror::Object* obj,
                                          [[maybe_unused]] MemberOffset field_offset,
                                          [[maybe_unused]] uint8_t value,
                                          [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11209 
// Transaction write-record stub; real recording happens in the AOT subclass.
void ClassLinker::RecordWriteFieldByte([[maybe_unused]] mirror::Object* obj,
                                       [[maybe_unused]] MemberOffset field_offset,
                                       [[maybe_unused]] int8_t value,
                                       [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11218 
// Transaction write-record stub; real recording happens in the AOT subclass.
void ClassLinker::RecordWriteFieldChar([[maybe_unused]] mirror::Object* obj,
                                       [[maybe_unused]] MemberOffset field_offset,
                                       [[maybe_unused]] uint16_t value,
                                       [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11227 
// Transaction write-record stub; real recording happens in the AOT subclass.
void ClassLinker::RecordWriteFieldShort([[maybe_unused]] mirror::Object* obj,
                                        [[maybe_unused]] MemberOffset field_offset,
                                        [[maybe_unused]] int16_t value,
                                        [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11236 
// Transaction write-record stub; real recording happens in the AOT subclass.
void ClassLinker::RecordWriteField32([[maybe_unused]] mirror::Object* obj,
                                     [[maybe_unused]] MemberOffset field_offset,
                                     [[maybe_unused]] uint32_t value,
                                     [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11245 
// Transaction write-record stub; real recording happens in the AOT subclass.
void ClassLinker::RecordWriteField64([[maybe_unused]] mirror::Object* obj,
                                     [[maybe_unused]] MemberOffset field_offset,
                                     [[maybe_unused]] uint64_t value,
                                     [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11254 
// Transaction write-record stub; real recording happens in the AOT subclass.
void ClassLinker::RecordWriteFieldReference([[maybe_unused]] mirror::Object* obj,
                                            [[maybe_unused]] MemberOffset field_offset,
                                            [[maybe_unused]] ObjPtr<mirror::Object> value,
                                            [[maybe_unused]] bool is_volatile) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11263 
// Transaction array-write-record stub; real recording happens in the AOT subclass.
void ClassLinker::RecordWriteArray([[maybe_unused]] mirror::Array* array,
                                   [[maybe_unused]] size_t index,
                                   [[maybe_unused]] uint64_t value) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11271 
// Transaction intern-table stub; real recording happens in the AOT subclass.
void ClassLinker::RecordStrongStringInsertion([[maybe_unused]] ObjPtr<mirror::String> s) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11277 
// Transaction intern-table stub; real recording happens in the AOT subclass.
void ClassLinker::RecordWeakStringInsertion([[maybe_unused]] ObjPtr<mirror::String> s) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11283 
// Transaction intern-table stub; real recording happens in the AOT subclass.
void ClassLinker::RecordStrongStringRemoval([[maybe_unused]] ObjPtr<mirror::String> s) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11289 
// Transaction intern-table stub; real recording happens in the AOT subclass.
void ClassLinker::RecordWeakStringRemoval([[maybe_unused]] ObjPtr<mirror::String> s) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11295 
// Transaction dex-cache stub; real recording happens in the AOT subclass.
void ClassLinker::RecordResolveString([[maybe_unused]] ObjPtr<mirror::DexCache> dex_cache,
                                      [[maybe_unused]] dex::StringIndex string_idx) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11302 
// Transaction dex-cache stub; real recording happens in the AOT subclass.
void ClassLinker::RecordResolveMethodType([[maybe_unused]] ObjPtr<mirror::DexCache> dex_cache,
                                          [[maybe_unused]] dex::ProtoIndex proto_idx) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11309 
// Transaction abort stub; only the AOT subclass can abort a transaction.
void ClassLinker::ThrowTransactionAbortError([[maybe_unused]] Thread* self) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11315 
// Transaction abort stub (printf-style variant); AOT subclass only.
void ClassLinker::AbortTransactionF(
    [[maybe_unused]] Thread* self, [[maybe_unused]] const char* fmt, ...) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11322 
// Transaction abort stub (va_list variant); AOT subclass only.
void ClassLinker::AbortTransactionV([[maybe_unused]] Thread* self,
                                    [[maybe_unused]] const char* fmt,
                                    [[maybe_unused]] va_list args) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11330 
// Transaction state query stub; AOT subclass only.
bool ClassLinker::IsTransactionAborted() const {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11336 
// Intentional no-op: the runtime `ClassLinker` keeps no transaction state, so
// there are no GC roots to report here. Note this one is NOT fatal, unlike the
// other transaction stubs, because the GC calls it unconditionally.
void ClassLinker::VisitTransactionRoots([[maybe_unused]] RootVisitor* visitor) {
  // Nothing to do for normal `ClassLinker`, only `AotClassLinker` handles transactions.
}
11340 
// Transactional interpreter entrypoint stub; AOT subclass only.
const void* ClassLinker::GetTransactionalInterpreter() {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
11346 
RemoveDexFromCaches(const DexFile & dex_file)11347 void ClassLinker::RemoveDexFromCaches(const DexFile& dex_file) {
11348   ReaderMutexLock mu(Thread::Current(), *Locks::dex_lock_);
11349 
11350   auto it = dex_caches_.find(&dex_file);
11351   if (it != dex_caches_.end()) {
11352       dex_caches_.erase(it);
11353   }
11354 }
11355 
11356 // GetClassLoadersVisitor collects visited class loaders.
11357 class GetClassLoadersVisitor : public ClassLoaderVisitor {
11358  public:
GetClassLoadersVisitor(VariableSizedHandleScope * class_loaders)11359   explicit GetClassLoadersVisitor(VariableSizedHandleScope* class_loaders)
11360       : class_loaders_(class_loaders) {}
11361 
Visit(ObjPtr<mirror::ClassLoader> class_loader)11362   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
11363       REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
11364     DCHECK(class_loader != nullptr);
11365     class_loaders_->NewHandle(class_loader);
11366   }
11367 
11368  private:
11369   VariableSizedHandleScope* const class_loaders_;
11370 };
11371 
GetClassLoaders(Thread * self,VariableSizedHandleScope * handles)11372 void ClassLinker::GetClassLoaders(Thread* self, VariableSizedHandleScope* handles) {
11373   GetClassLoadersVisitor class_loader_visitor(handles);
11374   ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
11375   VisitClassLoaders(&class_loader_visitor);
11376 }
11377 
// Instantiate ClassLinker::AllocClass.
// Explicit instantiations for both the movable and non-movable variants, so the
// template definition need not be visible to all callers.
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ false>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);
11387 
11388 }  // namespace art
11389