xref: /aosp_15_r20/art/runtime/instrumentation.cc (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
1 /*
2  * Copyright (C) 2011 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "instrumentation.h"
18 
19 #include <functional>
20 #include <optional>
21 #include <sstream>
22 
23 #include <android-base/logging.h>
24 
25 #include "arch/context.h"
26 #include "art_field-inl.h"
27 #include "art_method-inl.h"
28 #include "base/atomic.h"
29 #include "base/callee_save_type.h"
30 #include "class_linker.h"
31 #include "debugger.h"
32 #include "dex/dex_file-inl.h"
33 #include "dex/dex_file_types.h"
34 #include "dex/dex_instruction-inl.h"
35 #include "entrypoints/quick/quick_alloc_entrypoints.h"
36 #include "entrypoints/quick/quick_entrypoints.h"
37 #include "entrypoints/quick/runtime_entrypoints_list.h"
38 #include "entrypoints/runtime_asm_entrypoints.h"
39 #include "gc_root-inl.h"
40 #include "interpreter/interpreter.h"
41 #include "interpreter/interpreter_common.h"
42 #include "jit/jit.h"
43 #include "jit/jit_code_cache.h"
44 #include "jvalue-inl.h"
45 #include "jvalue.h"
46 #include "mirror/class-inl.h"
47 #include "mirror/dex_cache.h"
48 #include "mirror/object-inl.h"
49 #include "mirror/object_array-inl.h"
50 #include "nterp_helpers.h"
51 #include "nth_caller_visitor.h"
52 #include "oat/oat_file_manager.h"
53 #include "oat/oat_quick_method_header.h"
54 #include "runtime-inl.h"
55 #include "thread.h"
56 #include "thread_list.h"
57 
58 namespace art HIDDEN {
59 
60 namespace instrumentation {
61 
62 constexpr bool kVerboseInstrumentation = false;
63 
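// Bridges the reference-returning MethodExited callback to the JValue-based overload; in debug
// builds it verifies that the listener did not replace the returned reference.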
64 void InstrumentationListener::MethodExited(
65     Thread* thread,
66     ArtMethod* method,
67     OptionalFrame frame,
68     MutableHandle<mirror::Object>& return_value) {
69   DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
70             Primitive::kPrimNot);
71   const void* original_ret = return_value.Get();
72   JValue v;
73   v.SetL(return_value.Get());
74   MethodExited(thread, method, frame, v);
75   DCHECK(original_ret == v.GetL()) << "Return value changed";
76 }
77 
78 void InstrumentationListener::FieldWritten(Thread* thread,
79                                            Handle<mirror::Object> this_object,
80                                            ArtMethod* method,
81                                            uint32_t dex_pc,
82                                            ArtField* field,
83                                            Handle<mirror::Object> field_value) {
84   DCHECK(!field->IsPrimitiveType());
85   JValue v;
86   v.SetL(field_value.Get());
87   FieldWritten(thread, this_object, method, dex_pc, field, v);
88 }
89 
90 // Instrumentation works on non-inlined frames by updating returned PCs
91 // of compiled frames.
92 static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
93     StackVisitor::StackWalkKind::kSkipInlinedFrames;
94 
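// ClassVisitor that installs the appropriate entry point stubs for every method of each visited
// class.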
95 class InstallStubsClassVisitor : public ClassVisitor {
96  public:
97   explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
98       : instrumentation_(instrumentation) {}
99 
100   bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES(Locks::mutator_lock_) {
101     instrumentation_->InstallStubsForClass(klass.Ptr());
102     return true;  // we visit all classes.
103   }
104 
105  private:
106   Instrumentation* const instrumentation_;
107 };
108 
109 Instrumentation::Instrumentation()
110     : run_exit_hooks_(false),
111       instrumentation_level_(InstrumentationLevel::kInstrumentNothing),
112       forced_interpret_only_(false),
113       have_method_entry_listeners_(0),
114       have_method_exit_listeners_(0),
115       have_method_unwind_listeners_(false),
116       have_dex_pc_listeners_(false),
117       have_field_read_listeners_(false),
118       have_field_write_listeners_(false),
119       have_exception_thrown_listeners_(false),
120       have_watched_frame_pop_listeners_(false),
121       have_branch_listeners_(false),
122       have_exception_handled_listeners_(false),
123       quick_alloc_entry_points_instrumentation_counter_(0),
124       alloc_entrypoints_instrumented_(false) {}
125 
126 bool Instrumentation::ProcessMethodUnwindCallbacks(Thread* self,
127                                                    std::queue<ArtMethod*>& methods,
128                                                    MutableHandle<mirror::Throwable>& exception) {
129   DCHECK(!self->IsExceptionPending());
130   if (!HasMethodUnwindListeners()) {
131     return true;
132   }
133   if (kVerboseInstrumentation) {
134     LOG(INFO) << "Popping frames for exception " << exception->Dump();
135   }
136   // The instrumentation events expect the exception to be set.
137   self->SetException(exception.Get());
138   bool new_exception_thrown = false;
139 
140   // Process callbacks for all methods that would be unwound until a new exception is thrown.
141   while (!methods.empty()) {
142     ArtMethod* method = methods.front();
143     methods.pop();
144     if (kVerboseInstrumentation) {
145       LOG(INFO) << "Popping for unwind " << method->PrettyMethod();
146     }
147 
148     if (method->IsRuntimeMethod()) {
149       continue;
150     }
151 
152     // Notify listeners of method unwind.
153     // TODO: improve the dex_pc information here.
154     uint32_t dex_pc = dex::kDexNoIndex;
155     MethodUnwindEvent(self, method, dex_pc);
156     new_exception_thrown = self->GetException() != exception.Get();
157     if (new_exception_thrown) {
158       break;
159     }
160   }
161 
162   exception.Assign(self->GetException());
163   self->ClearException();
164   if (kVerboseInstrumentation && new_exception_thrown) {
165     LOG(INFO) << "Did partial pop of frames due to new exception";
166   }
167   return !new_exception_thrown;
168 }
169 
170 void Instrumentation::InstallStubsForClass(ObjPtr<mirror::Class> klass) {
171   if (!klass->IsResolved()) {
172     // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
173     // could not be initialized or linked with regard to class inheritance.
174   } else if (klass->IsErroneousResolved()) {
175     // We can't execute code in an erroneous class: do nothing.
176   } else {
177     for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
178       InstallStubsForMethod(&method);
179     }
180   }
181 }
182 
183 static bool CanHandleInitializationCheck(const void* code) {
184   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
185   return class_linker->IsQuickResolutionStub(code) ||
186          class_linker->IsQuickToInterpreterBridge(code) ||
187          class_linker->IsQuickGenericJniStub(code) ||
188          (code == interpreter::GetNterpWithClinitEntryPoint());
189 }
190 
191 static bool IsProxyInit(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
192   // Annoyingly, this can be called before we have actually initialized WellKnownClasses, so we
193   // also need to check this based on the declaring-class descriptor. The check is valid because
194   // Proxy only has a single constructor.
195   ArtMethod* well_known_proxy_init = WellKnownClasses::java_lang_reflect_Proxy_init;
196   if (well_known_proxy_init == method) {
197     return true;
198   }
199 
200   if (well_known_proxy_init != nullptr) {
201     return false;
202   }
203 
204   return method->IsConstructor() && !method->IsStatic() &&
205       method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;");
206 }
207 
208 // Returns true if the code at entry_point supports calling the method entry / exit hooks. JITed
209 // code compiled with instrumentation support calls the entry / exit hooks directly and doesn't
210 // need a stub.
210 static bool CodeSupportsEntryExitHooks(const void* entry_point, ArtMethod* method)
211     REQUIRES_SHARED(Locks::mutator_lock_) {
212   // Proxy.init should always run with the switch interpreter where entry / exit hooks are
213   // supported.
214   if (IsProxyInit(method)) {
215     return true;
216   }
217 
218   // In some tests the runtime isn't set up fully and hence the entry points could be nullptr.
219   // Just be conservative and return false here.
220   if (entry_point == nullptr) {
221     return false;
222   }
223 
224   ClassLinker* linker = Runtime::Current()->GetClassLinker();
225   // Interpreter supports entry / exit hooks. Resolution stubs fetch code that supports entry / exit
226   // hooks when required. So return true for both cases.
227   if (linker->IsQuickToInterpreterBridge(entry_point) ||
228       linker->IsQuickResolutionStub(entry_point)) {
229     return true;
230   }
231 
232   // When JITing code for debuggable runtimes or when instrumentation is active, we generate the
233   // code to call method entry / exit hooks when required.
234   jit::Jit* jit = Runtime::Current()->GetJit();
235   if (jit != nullptr && jit->GetCodeCache()->ContainsPc(entry_point)) {
236     // If JITed code was compiled with instrumentation support we support entry / exit hooks.
237     OatQuickMethodHeader* header = OatQuickMethodHeader::FromEntryPoint(entry_point);
238     return CodeInfo::IsDebuggable(header->GetOptimizedCodeInfoPtr());
239   }
240 
241   // GenericJni trampoline can handle entry / exit hooks.
242   if (linker->IsQuickGenericJniStub(entry_point)) {
243     return true;
244   }
245 
246   // The remaining cases are nterp / oat code / JIT code that isn't compiled with instrumentation
247   // support.
248   return false;
249 }
250 
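// Relaxed compare-and-swap of width T on the word at `ptr`; returns whether `old_value` was
// replaced by `new_value`.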
251 template <typename T>
252 bool CompareExchange(uintptr_t ptr, uintptr_t old_value, uintptr_t new_value) {
253   std::atomic<T>* atomic_addr = reinterpret_cast<std::atomic<T>*>(ptr);
254   T cast_old_value = dchecked_integral_cast<T>(old_value);
255   return atomic_addr->compare_exchange_strong(cast_old_value,
256                                               dchecked_integral_cast<T>(new_value),
257                                               std::memory_order_relaxed);
258 }
259 
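// Atomically updates `method`'s quick entry point to `new_code`, notifying the JIT code cache when
// a previously installed JIT entry point becomes zombie. Debug builds also verify that `new_code`
// is compatible with the current instrumentation state.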
260 static void UpdateEntryPoints(ArtMethod* method, const void* new_code)
261     REQUIRES_SHARED(Locks::mutator_lock_) {
262   if (kIsDebugBuild) {
263     if (method->StillNeedsClinitCheckMayBeDead()) {
264       CHECK(CanHandleInitializationCheck(new_code));
265     }
266     jit::Jit* jit = Runtime::Current()->GetJit();
267     if (jit != nullptr && jit->GetCodeCache()->ContainsPc(new_code)) {
268       // Ensure we always have the thumb entrypoint for JIT on arm32.
269       if (kRuntimeQuickCodeISA == InstructionSet::kArm) {
270         CHECK_EQ(reinterpret_cast<uintptr_t>(new_code) & 1, 1u);
271       }
272     }
273     const Instrumentation* instr = Runtime::Current()->GetInstrumentation();
274     if (instr->EntryExitStubsInstalled()) {
275       CHECK(CodeSupportsEntryExitHooks(new_code, method));
276     }
277     if (instr->InterpreterStubsInstalled() && !method->IsNative()) {
278       CHECK_EQ(new_code, GetQuickToInterpreterBridge());
279     }
280   }
281   const void* current_entry_point = method->GetEntryPointFromQuickCompiledCode();
282   if (current_entry_point == new_code) {
283     // If the method is from a boot image, don't dirty it if the entrypoint
284     // doesn't change.
285     return;
286   }
287 
288   // Do an atomic exchange to avoid potentially unregistering JIT code twice.
289   MemberOffset offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kRuntimePointerSize);
290   uintptr_t old_value = reinterpret_cast<uintptr_t>(current_entry_point);
291   uintptr_t new_value = reinterpret_cast<uintptr_t>(new_code);
292   uintptr_t ptr = reinterpret_cast<uintptr_t>(method) + offset.Uint32Value();
293   bool success = (kRuntimePointerSize == PointerSize::k32)
294       ? CompareExchange<uint32_t>(ptr, old_value, new_value)
295       : CompareExchange<uint64_t>(ptr, old_value, new_value);
296 
297   // If we successfully updated the entrypoint and the old entrypoint is JITted
298   // code, register the old entrypoint as zombie.
299   jit::Jit* jit = Runtime::Current()->GetJit();
300   if (success &&
301       jit != nullptr &&
302       jit->GetCodeCache()->ContainsPc(current_entry_point)) {
303     jit->GetCodeCache()->AddZombieCode(method, current_entry_point);
304   }
305 }
306 
307 bool Instrumentation::NeedsDexPcEvents(ArtMethod* method, Thread* thread) {
308   return (InterpretOnly(method) || thread->IsForceInterpreter()) && HasDexPcListeners();
309 }
310 
311 bool Instrumentation::InterpretOnly(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
312   if (method->IsNative()) {
313     return false;
314   }
315   return InterpretOnly() || IsDeoptimized(method);
316 }
317 
318 static bool CanUseAotCode(const void* quick_code)
319     REQUIRES_SHARED(Locks::mutator_lock_) {
320   if (quick_code == nullptr) {
321     return false;
322   }
323   Runtime* runtime = Runtime::Current();
324   // For simplicity, we never use AOT code for debuggable.
325   if (runtime->IsJavaDebuggable()) {
326     return false;
327   }
328 
329   if (runtime->IsNativeDebuggable()) {
330     DCHECK(runtime->UseJitCompilation() && runtime->GetJit()->JitAtFirstUse());
331     // If we are doing native debugging, ignore application's AOT code,
332     // since we want to JIT it (at first use) with extra stackmaps for native
333     // debugging. We keep however all AOT code from the boot image,
334     // since the JIT-at-first-use is blocking and would result in non-negligible
335     // startup performance impact.
336     return runtime->GetHeap()->IsInBootImageOatFile(quick_code);
337   }
338 
339   return true;
340 }
341 
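// Nterp can only be used when the runtime enables it, the method itself is nterp-compatible, and
// the declaring class has been verified.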
342 static bool CanUseNterp(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
343   return interpreter::CanRuntimeUseNterp() &&
344       CanMethodUseNterp(method) &&
345       method->IsDeclaringClassVerifiedMayBeDead();
346 }
347 
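// Picks the most optimized entry point available for `method`: the proxy invoke handler, usable
// AOT code, a precompiled JIT entry point, nterp, or finally the GenericJNI stub / interpreter
// bridge.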
348 static const void* GetOptimizedCodeFor(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
349   DCHECK(!Runtime::Current()->GetInstrumentation()->InterpretOnly(method));
350   CHECK(method->IsInvokable()) << method->PrettyMethod();
351   if (method->IsProxyMethod()) {
352     return GetQuickProxyInvokeHandler();
353   }
354 
355   // In debuggable mode, we can only use AOT code for native methods.
356   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
357   const void* aot_code = method->GetOatMethodQuickCode(class_linker->GetImagePointerSize());
358   if (CanUseAotCode(aot_code)) {
359     return aot_code;
360   }
361 
362   // If the method has been precompiled, there can be a JIT version.
363   jit::Jit* jit = Runtime::Current()->GetJit();
364   if (jit != nullptr) {
365     const void* code = jit->GetCodeCache()->GetSavedEntryPointOfPreCompiledMethod(method);
366     if (code != nullptr) {
367       return code;
368     }
369   }
370 
371   // We need to check if the class has been verified for setting up nterp, as
372   // the verifier could punt the method to the switch interpreter in case we
373   // need to do lock counting.
374   if (CanUseNterp(method)) {
375     return interpreter::GetNterpEntryPoint();
376   }
377 
378   return method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge();
379 }
380 
381 void Instrumentation::InitializeMethodsCode(ArtMethod* method, const void* aot_code)
382     REQUIRES_SHARED(Locks::mutator_lock_) {
383   if (!method->IsInvokable()) {
384     DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr ||
385            Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(
386                method->GetEntryPointFromQuickCompiledCode()));
387     UpdateEntryPoints(method, GetQuickToInterpreterBridge());
388     return;
389   }
390 
391   // Use instrumentation entrypoints if instrumentation is installed.
392   if (UNLIKELY(EntryExitStubsInstalled() || IsForcedInterpretOnly() || IsDeoptimized(method))) {
393     UpdateEntryPoints(
394         method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
395     return;
396   }
397 
398   // Special case if we need an initialization check.
399   // The method and its declaring class may be dead when starting JIT GC during managed heap GC.
400   if (method->StillNeedsClinitCheckMayBeDead()) {
401     // If we have code but the method needs a class initialization check before calling
402     // that code, install the resolution stub that will perform the check.
403     // It will be replaced by the proper entry point by ClassLinker::FixupStaticTrampolines
404     // after initializing class (see ClassLinker::InitializeClass method).
405     // Note: this mimics the logic in image_writer.cc that installs the resolution
406     // stub only if we have compiled code or we can execute nterp, and the method needs a class
407     // initialization check.
408     if (aot_code != nullptr || method->IsNative() || CanUseNterp(method)) {
409       if (kIsDebugBuild && CanUseNterp(method)) {
410         // Adds some test coverage for the nterp clinit entrypoint.
411         UpdateEntryPoints(method, interpreter::GetNterpWithClinitEntryPoint());
412       } else {
413         UpdateEntryPoints(method, GetQuickResolutionStub());
414       }
415     } else {
416       UpdateEntryPoints(method, GetQuickToInterpreterBridge());
417     }
418     return;
419   }
420 
421   // Use the provided AOT code if possible.
422   if (CanUseAotCode(aot_code)) {
423     UpdateEntryPoints(method, aot_code);
424     return;
425   }
426 
427   // We check if the class is verified as we need the slow interpreter for lock verification.
428   // If the class is not verified, this will be updated in
429   // ClassLinker::UpdateClassAfterVerification.
430   if (CanUseNterp(method)) {
431     UpdateEntryPoints(method, interpreter::GetNterpEntryPoint());
432     return;
433   }
434 
435   // Use default entrypoints.
436   UpdateEntryPoints(
437       method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
438 }
439 
440 void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
441   if (!method->IsInvokable() || method->IsProxyMethod()) {
442     // Do not change stubs for these methods.
443     return;
444   }
445   // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
446   // TODO: We should remove the need for this, since it means we cannot always correctly detect
447   // calls to Proxy.<init>.
448   if (IsProxyInit(method)) {
449     return;
450   }
451 
452   // If the instrumentation needs to go through the interpreter, just update the
453   // entrypoint to interpreter.
454   if (InterpretOnly(method)) {
455     UpdateEntryPoints(method, GetQuickToInterpreterBridge());
456     return;
457   }
458 
459   if (EntryExitStubsInstalled()) {
460     // Install interpreter bridge / GenericJni stub if the existing code doesn't support
461     // entry / exit hooks.
462     if (!CodeSupportsEntryExitHooks(method->GetEntryPointFromQuickCompiledCode(), method)) {
463       UpdateEntryPoints(
464           method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
465     }
466     return;
467   }
468 
469   // We're being asked to restore the entrypoints after instrumentation.
470   CHECK_EQ(instrumentation_level_, InstrumentationLevel::kInstrumentNothing);
471   // We need to have the resolution stub still if the class is not initialized.
472   if (method->StillNeedsClinitCheck()) {
473     UpdateEntryPoints(method, GetQuickResolutionStub());
474     return;
475   }
476   UpdateEntryPoints(method, GetOptimizedCodeFor(method));
477 }
478 
479 void Instrumentation::UpdateEntrypointsForDebuggable() {
480   Runtime* runtime = Runtime::Current();
481   // If we are transitioning from non-debuggable to debuggable, we patch
482   // entry points of methods to remove any aot / JITed entry points.
483   InstallStubsClassVisitor visitor(this);
484   runtime->GetClassLinker()->VisitClasses(&visitor);
485 }
486 
487 bool Instrumentation::MethodSupportsExitEvents(ArtMethod* method,
488                                                const OatQuickMethodHeader* header) {
489   if (header == nullptr) {
490     // Header can be a nullptr for runtime / proxy methods that don't support method exit hooks
491     // or for native methods that use generic jni stubs. Generic jni stubs support method exit
492     // hooks.
493     return method->IsNative();
494   }
495 
496   if (header->IsNterpMethodHeader()) {
497     // Nterp doesn't support method exit events
498     return false;
499   }
500 
501   DCHECK(header->IsOptimized());
502   if (CodeInfo::IsDebuggable(header->GetOptimizedCodeInfoPtr())) {
503     // For optimized code, we only support method entry / exit hooks if they are compiled as
504     // debuggable.
505     return true;
506   }
507 
508   return false;
509 }
510 
511 // Updates the frames on the stack to support any changes related to instrumentation.
512 // For JITed frames, the deoptimization flag is updated to enable deoptimization of
513 // methods when necessary. Shadow frames are updated if dex pc event
514 // notification has changed. When deopt_all_frames is true, the flag is
515 // updated to force a deoptimization.
516 void InstrumentationInstallStack(Thread* thread, bool deopt_all_frames)
517     REQUIRES(Locks::mutator_lock_) {
518   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
519   struct InstallStackVisitor final : public StackVisitor {
520     InstallStackVisitor(Thread* thread_in,
521                         Context* context,
522                         bool deopt_all_frames)
523         : StackVisitor(thread_in, context, kInstrumentationStackWalk),
524           deopt_all_frames_(deopt_all_frames),
525           runtime_methods_need_deopt_check_(false) {}
526 
527     bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
528       ArtMethod* m = GetMethod();
529       if (m == nullptr || m->IsRuntimeMethod()) {
530         if (kVerboseInstrumentation) {
531           LOG(INFO) << "  Skipping upcall / runtime method. Frame " << GetFrameId();
532         }
533         return true;  // Ignore upcalls and runtime methods.
534       }
535 
536       bool is_shadow_frame = GetCurrentQuickFrame() == nullptr;
537       if (kVerboseInstrumentation) {
538         LOG(INFO) << "Processing frame: method: " << m->PrettyMethod()
539                   << " is_shadow_frame: " << is_shadow_frame;
540       }
541 
542       // Handle interpreter frame.
543       if (is_shadow_frame) {
544         // Since we are updating the instrumentation-related information, we have to recalculate
545         // NeedsDexPcEvents. For example, when a new method or thread is deoptimized or interpreter
546         // stubs are installed, NeedsDexPcEvents could change for the shadow frames on the stack.
547         // If we don't update it here, we would miss reporting dex pc events, which is incorrect.
548         ShadowFrame* shadow_frame = GetCurrentShadowFrame();
549         DCHECK(shadow_frame != nullptr);
550         shadow_frame->SetNotifyDexPcMoveEvents(
551             Runtime::Current()->GetInstrumentation()->NeedsDexPcEvents(GetMethod(), GetThread()));
552         return true;  // Continue.
553       }
554 
555       DCHECK(!m->IsRuntimeMethod());
556       const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
557       // If it is a JITed frame, just set the deopt bit if required; otherwise continue.
558       // We need kForceDeoptForRedefinition to ensure we don't use any JITed code after a
559       // redefinition. We support redefinition only if the runtime has started off as a
560       // debuggable runtime which makes sure we don't use any AOT or Nterp code.
561       // The CheckCallerForDeopt is an optimization which we only do for non-native JITed code for
562       // now. We can extend it to native methods but that needs reserving an additional stack slot.
563       // We don't do it currently since that wasn't important for debugger performance.
564       if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
565         if (deopt_all_frames_) {
566           runtime_methods_need_deopt_check_ = true;
567           SetShouldDeoptimizeFlag(DeoptimizeFlagValue::kForceDeoptForRedefinition);
568         }
569         SetShouldDeoptimizeFlag(DeoptimizeFlagValue::kCheckCallerForDeopt);
570       }
571 
572       return true;  // Continue.
573     }
574     bool deopt_all_frames_;
575     bool runtime_methods_need_deopt_check_;
576   };
577   if (kVerboseInstrumentation) {
578     std::string thread_name;
579     thread->GetThreadName(thread_name);
580     LOG(INFO) << "Installing exit stubs in " << thread_name;
581   }
582 
583   std::unique_ptr<Context> context(Context::Create());
584   InstallStackVisitor visitor(thread,
585                               context.get(),
586                               deopt_all_frames);
587   visitor.WalkStack(true);
588 
589   if (visitor.runtime_methods_need_deopt_check_) {
590     thread->SetDeoptCheckRequired(true);
591   }
592 
593   thread->VerifyStack();
594 }
595 
596 void UpdateNeedsDexPcEventsOnStack(Thread* thread) REQUIRES(Locks::mutator_lock_) {
597   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
598 
599   struct InstallStackVisitor final : public StackVisitor {
600     InstallStackVisitor(Thread* thread_in, Context* context)
601         : StackVisitor(thread_in, context, kInstrumentationStackWalk) {}
602 
603     bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
604       ShadowFrame* shadow_frame = GetCurrentShadowFrame();
605       if (shadow_frame != nullptr) {
606         shadow_frame->SetNotifyDexPcMoveEvents(
607             Runtime::Current()->GetInstrumentation()->NeedsDexPcEvents(GetMethod(), GetThread()));
608       }
609       return true;
610     }
611   };
612 
613   std::unique_ptr<Context> context(Context::Create());
614   InstallStackVisitor visitor(thread, context.get());
615   visitor.WalkStack(true);
616 }
617 
618 void ReportMethodEntryForOnStackMethods(InstrumentationListener* listener, Thread* thread)
619     REQUIRES(Locks::mutator_lock_) {
620   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
621 
622   struct InstallStackVisitor final : public StackVisitor {
623     InstallStackVisitor(Thread* thread_in, Context* context)
624         : StackVisitor(thread_in, context, kInstrumentationStackWalk) {}
625 
626     bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
627       ArtMethod* m = GetMethod();
628       if (m == nullptr || m->IsRuntimeMethod()) {
629         // Skip upcall / runtime methods
630         return true;
631       }
632 
633       if (GetCurrentShadowFrame() != nullptr) {
634         stack_methods_.push_back(m);
635       } else {
636         const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
637         if (Runtime::Current()->GetInstrumentation()->MethodSupportsExitEvents(m, method_header)) {
638           // It is unexpected to see a method enter event but not a method exit event so record
639           // stack methods only for frames that support method exit events. Even if we deoptimize we
640           // make sure that we only call method exit event if the frame supported it in the first
641           // place. For example, deoptimizing from JITed code with debug support calls a method exit hook
642           // but deoptimizing from nterp doesn't.
643           stack_methods_.push_back(m);
644         }
645       }
646       return true;
647     }
648 
649     std::vector<ArtMethod*> stack_methods_;
650   };
651 
652   if (kVerboseInstrumentation) {
653     std::string thread_name;
654     thread->GetThreadName(thread_name);
655     LOG(INFO) << "Reporting method entry for methods on the stack of " << thread_name;
656   }
657 
658   std::unique_ptr<Context> context(Context::Create());
659   InstallStackVisitor visitor(thread, context.get());
660   visitor.WalkStack(true);
661 
662   // Create method enter events for all methods currently on the thread's stack.
663   for (auto smi = visitor.stack_methods_.rbegin(); smi != visitor.stack_methods_.rend(); smi++) {
664     listener->MethodEntered(thread, *smi);
665   }
666 }
667 
668 void Instrumentation::InstrumentThreadStack(Thread* thread, bool force_deopt) {
669   run_exit_hooks_ = true;
670   InstrumentationInstallStack(thread, force_deopt);
671 }
672 
673 void Instrumentation::InstrumentAllThreadStacks(bool force_deopt) {
674   run_exit_hooks_ = true;
675   MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
676   for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
677     InstrumentThreadStack(thread, force_deopt);
678   }
679 }
680 
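// Clears the kCheckCallerForDeopt flag from JITed frames on `thread`'s stack; only used once no
// frame still requires a forced deoptimization.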
681 static void InstrumentationRestoreStack(Thread* thread) REQUIRES(Locks::mutator_lock_) {
682   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
683 
684   struct RestoreStackVisitor final : public StackVisitor {
685     RestoreStackVisitor(Thread* thread)
686         : StackVisitor(thread, nullptr, kInstrumentationStackWalk), thread_(thread) {}
687 
688     bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
689       if (GetCurrentQuickFrame() == nullptr) {
690         return true;
691       }
692 
693       const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
694       if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
695         // We shouldn't restore the stack if any of the frames need a force deopt.
696         DCHECK(!ShouldForceDeoptForRedefinition());
697         UnsetShouldDeoptimizeFlag(DeoptimizeFlagValue::kCheckCallerForDeopt);
698       }
699       return true;  // Continue.
700     }
701     Thread* const thread_;
702   };
703 
704   if (kVerboseInstrumentation) {
705     std::string thread_name;
706     thread->GetThreadName(thread_name);
707     LOG(INFO) << "Restoring stack for " << thread_name;
708   }
709   DCHECK(!thread->IsDeoptCheckRequired());
710   RestoreStackVisitor visitor(thread);
711   visitor.WalkStack(true);
712 }
713 
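// Returns true if any JITed frame on `thread`'s stack is flagged for a forced deoptimization
// (e.g. for redefinition).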
714 static bool HasFramesNeedingForceDeopt(Thread* thread) REQUIRES(Locks::mutator_lock_) {
715   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
716 
717   struct CheckForForceDeoptStackVisitor final : public StackVisitor {
718     CheckForForceDeoptStackVisitor(Thread* thread)
719         : StackVisitor(thread, nullptr, kInstrumentationStackWalk),
720           thread_(thread),
721           force_deopt_check_needed_(false) {}
722 
723     bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
724       if (GetCurrentQuickFrame() == nullptr) {
725         return true;
726       }
727 
728       const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
729       if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
730         if (ShouldForceDeoptForRedefinition()) {
731           force_deopt_check_needed_ = true;
732           return false;
733         }
734       }
735       return true;  // Continue.
736     }
737     Thread* const thread_;
738     bool force_deopt_check_needed_;
739   };
740 
741   CheckForForceDeoptStackVisitor visitor(thread);
742   visitor.WalkStack(true);
743   // If there is a frame that requires a force deopt we should have set the IsDeoptCheckRequired
744   // bit. We don't check if the bit needs to be reset on every method exit / deoptimization. We
745   // only check when we no longer need instrumentation support. So it is possible that the bit is
746   // set but we don't find any frames that need a force deopt on the stack so reverse implication
747   // set but we don't find any frames that need a force deopt on the stack, so the reverse
748   // implication doesn't hold.
749   return visitor.force_deopt_check_needed_;
750 }
751 
752 void Instrumentation::DeoptimizeAllThreadFrames() {
753   InstrumentAllThreadStacks(/* force_deopt= */ true);
754 }
755 
756 static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
757   return (events & expected) != 0;
758 }
759 
760 static bool PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
761                                      uint32_t events,
762                                      std::list<InstrumentationListener*>& list,
763                                      InstrumentationListener* listener)
764     REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
765   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
766   if (!HasEvent(event, events)) {
767     return false;
768   }
769   // If there is a free slot in the list, we insert the listener in that slot.
770   // Otherwise we add it to the end of the list.
771   auto it = std::find(list.begin(), list.end(), nullptr);
772   if (it != list.end()) {
773     *it = listener;
774   } else {
775     list.push_back(listener);
776   }
777   return true;
778 }
779 
780 static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
781                                      uint32_t events,
782                                      std::list<InstrumentationListener*>& list,
783                                      InstrumentationListener* listener,
784                                      bool* has_listener)
785     REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
786   if (PotentiallyAddListenerTo(event, events, list, listener)) {
787     *has_listener = true;
788   }
789 }
790 
791 static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
792                                      uint32_t events,
793                                      std::list<InstrumentationListener*>& list,
794                                      InstrumentationListener* listener,
795                                      uint8_t* has_listener,
796                                      uint8_t flag)
797     REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
798   if (PotentiallyAddListenerTo(event, events, list, listener)) {
799     *has_listener = *has_listener | flag;
800   }
801 }
802 
803 void Instrumentation::AddListener(InstrumentationListener* listener,
804                                   uint32_t events,
805                                   bool is_trace_listener) {
806   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
807   if (is_trace_listener) {
808     PotentiallyAddListenerTo(kMethodEntered,
809                              events,
810                              method_entry_fast_trace_listeners_,
811                              listener,
812                              &have_method_entry_listeners_,
813                              kFastTraceListeners);
814   } else {
815     PotentiallyAddListenerTo(kMethodEntered,
816                              events,
817                              method_entry_slow_listeners_,
818                              listener,
819                              &have_method_entry_listeners_,
820                              kSlowMethodEntryExitListeners);
821   }
822   if (is_trace_listener) {
823     PotentiallyAddListenerTo(kMethodExited,
824                              events,
825                              method_exit_fast_trace_listeners_,
826                              listener,
827                              &have_method_exit_listeners_,
828                              kFastTraceListeners);
829   } else {
830     PotentiallyAddListenerTo(kMethodExited,
831                              events,
832                              method_exit_slow_listeners_,
833                              listener,
834                              &have_method_exit_listeners_,
835                              kSlowMethodEntryExitListeners);
836   }
837   PotentiallyAddListenerTo(kMethodUnwind,
838                            events,
839                            method_unwind_listeners_,
840                            listener,
841                            &have_method_unwind_listeners_);
842   PotentiallyAddListenerTo(kBranch,
843                            events,
844                            branch_listeners_,
845                            listener,
846                            &have_branch_listeners_);
847   PotentiallyAddListenerTo(kDexPcMoved,
848                            events,
849                            dex_pc_listeners_,
850                            listener,
851                            &have_dex_pc_listeners_);
852   PotentiallyAddListenerTo(kFieldRead,
853                            events,
854                            field_read_listeners_,
855                            listener,
856                            &have_field_read_listeners_);
857   PotentiallyAddListenerTo(kFieldWritten,
858                            events,
859                            field_write_listeners_,
860                            listener,
861                            &have_field_write_listeners_);
862   PotentiallyAddListenerTo(kExceptionThrown,
863                            events,
864                            exception_thrown_listeners_,
865                            listener,
866                            &have_exception_thrown_listeners_);
867   PotentiallyAddListenerTo(kWatchedFramePop,
868                            events,
869                            watched_frame_pop_listeners_,
870                            listener,
871                            &have_watched_frame_pop_listeners_);
872   PotentiallyAddListenerTo(kExceptionHandled,
873                            events,
874                            exception_handled_listeners_,
875                            listener,
876                            &have_exception_handled_listeners_);
877   if (HasEvent(kDexPcMoved, events)) {
878     MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
879     for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
880       UpdateNeedsDexPcEventsOnStack(thread);
881     }
882   }
883 }
884 
885 static bool PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
886                                           uint32_t events,
887                                           std::list<InstrumentationListener*>& list,
888                                           InstrumentationListener* listener)
889     REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
890   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
891   if (!HasEvent(event, events)) {
892     return false;
893   }
894   auto it = std::find(list.begin(), list.end(), listener);
895   if (it != list.end()) {
896     // Just update the entry, do not remove from the list. Removing entries in the list
897     // is unsafe when mutators are iterating over it.
898     *it = nullptr;
899   }
900 
901   // Check if the list contains any non-null listener.
902   for (InstrumentationListener* l : list) {
903     if (l != nullptr) {
904       return false;
905     }
906   }
907 
908   return true;
909 }
910 
911 static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
912                                           uint32_t events,
913                                           std::list<InstrumentationListener*>& list,
914                                           InstrumentationListener* listener,
915                                           bool* has_listener)
916     REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
917   if (PotentiallyRemoveListenerFrom(event, events, list, listener)) {
918     *has_listener = false;
919   }
920 }
921 
922 static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
923                                           uint32_t events,
924                                           std::list<InstrumentationListener*>& list,
925                                           InstrumentationListener* listener,
926                                           uint8_t* has_listener,
927                                           uint8_t flag)
928     REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
929   if (PotentiallyRemoveListenerFrom(event, events, list, listener)) {
930     *has_listener = *has_listener & ~flag;
931   }
932 }
933 
934 void Instrumentation::RemoveListener(InstrumentationListener* listener,
935                                      uint32_t events,
936                                      bool is_trace_listener) {
937   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
938   if (is_trace_listener) {
939     PotentiallyRemoveListenerFrom(kMethodEntered,
940                                   events,
941                                   method_entry_fast_trace_listeners_,
942                                   listener,
943                                   &have_method_entry_listeners_,
944                                   kFastTraceListeners);
945   } else {
946     PotentiallyRemoveListenerFrom(kMethodEntered,
947                                   events,
948                                   method_entry_slow_listeners_,
949                                   listener,
950                                   &have_method_entry_listeners_,
951                                   kSlowMethodEntryExitListeners);
952   }
953   if (is_trace_listener) {
954     PotentiallyRemoveListenerFrom(kMethodExited,
955                                   events,
956                                   method_exit_fast_trace_listeners_,
957                                   listener,
958                                   &have_method_exit_listeners_,
959                                   kFastTraceListeners);
960   } else {
961     PotentiallyRemoveListenerFrom(kMethodExited,
962                                   events,
963                                   method_exit_slow_listeners_,
964                                   listener,
965                                   &have_method_exit_listeners_,
966                                   kSlowMethodEntryExitListeners);
967   }
968   PotentiallyRemoveListenerFrom(kMethodUnwind,
969                                 events,
970                                 method_unwind_listeners_,
971                                 listener,
972                                 &have_method_unwind_listeners_);
973   PotentiallyRemoveListenerFrom(kBranch,
974                                 events,
975                                 branch_listeners_,
976                                 listener,
977                                 &have_branch_listeners_);
978   PotentiallyRemoveListenerFrom(kDexPcMoved,
979                                 events,
980                                 dex_pc_listeners_,
981                                 listener,
982                                 &have_dex_pc_listeners_);
983   PotentiallyRemoveListenerFrom(kFieldRead,
984                                 events,
985                                 field_read_listeners_,
986                                 listener,
987                                 &have_field_read_listeners_);
988   PotentiallyRemoveListenerFrom(kFieldWritten,
989                                 events,
990                                 field_write_listeners_,
991                                 listener,
992                                 &have_field_write_listeners_);
993   PotentiallyRemoveListenerFrom(kExceptionThrown,
994                                 events,
995                                 exception_thrown_listeners_,
996                                 listener,
997                                 &have_exception_thrown_listeners_);
998   PotentiallyRemoveListenerFrom(kWatchedFramePop,
999                                 events,
1000                                 watched_frame_pop_listeners_,
1001                                 listener,
1002                                 &have_watched_frame_pop_listeners_);
1003   PotentiallyRemoveListenerFrom(kExceptionHandled,
1004                                 events,
1005                                 exception_handled_listeners_,
1006                                 listener,
1007                                 &have_exception_handled_listeners_);
1008   if (HasEvent(kDexPcMoved, events)) {
1009     MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
1010     for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
1011       UpdateNeedsDexPcEventsOnStack(thread);
1012     }
1013   }
1014 }
1015 
1016 Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
1017   return instrumentation_level_;
1018 }
1019 
1020 void Instrumentation::ConfigureStubs(const char* key,
1021                                      InstrumentationLevel desired_level,
1022                                      bool try_switch_to_non_debuggable) {
1023   // Store the instrumentation level for this key or remove it.
1024   if (desired_level == InstrumentationLevel::kInstrumentNothing) {
1025     // The client no longer needs instrumentation.
1026     requested_instrumentation_levels_.erase(key);
1027   } else {
1028     // The client needs instrumentation.
1029     requested_instrumentation_levels_.Overwrite(key, desired_level);
1030   }
1031 
1032   UpdateStubs(try_switch_to_non_debuggable);
1033 }
1034 
1035 void Instrumentation::UpdateInstrumentationLevel(InstrumentationLevel requested_level) {
1036   instrumentation_level_ = requested_level;
1037 }
1038 
1039 void Instrumentation::EnableEntryExitHooks(const char* key) {
1040   DCHECK(Runtime::Current()->IsJavaDebuggable());
1041   ConfigureStubs(key,
1042                  InstrumentationLevel::kInstrumentWithEntryExitHooks,
1043                  /*try_switch_to_non_debuggable=*/false);
1044 }
1045 
1046 void Instrumentation::MaybeRestoreInstrumentationStack() {
1047   // Restore stack only if there is no method currently deoptimized.
1048   if (!IsDeoptimizedMethodsEmpty()) {
1049     return;
1050   }
1051 
1052   Thread* self = Thread::Current();
1053   MutexLock mu(self, *Locks::thread_list_lock_);
1054   bool no_remaining_deopts = true;
1055   // Check that there are no other forced deoptimizations. Do it here so we only need to lock
1056   // thread_list_lock once.
1057   // The compiler gets confused on the thread annotations, so use
1058   // NO_THREAD_SAFETY_ANALYSIS. Note that we hold the mutator lock
1059   // exclusively at this point.
1060   Locks::mutator_lock_->AssertExclusiveHeld(self);
1061   Runtime::Current()->GetThreadList()->ForEach([&](Thread* t) NO_THREAD_SAFETY_ANALYSIS {
1062     bool has_force_deopt_frames = HasFramesNeedingForceDeopt(t);
1063     if (!has_force_deopt_frames) {
1064       // We no longer have any frames that require a force deopt check. If the bit was true then we
1065       // had some frames earlier but they already got deoptimized and are no longer on stack.
1066       t->SetDeoptCheckRequired(false);
1067     }
1068     no_remaining_deopts =
1069         no_remaining_deopts &&
1070         !t->IsForceInterpreter() &&
1071         !t->HasDebuggerShadowFrames() &&
1072         !has_force_deopt_frames;
1073   });
1074   if (no_remaining_deopts) {
1075     Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack);
1076     run_exit_hooks_ = false;
1077   }
1078 }
1079 
1080 void Instrumentation::UpdateStubs(bool try_switch_to_non_debuggable) {
1081   // Look for the highest required instrumentation level.
1082   InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
1083   for (const auto& v : requested_instrumentation_levels_) {
1084     requested_level = std::max(requested_level, v.second);
1085   }
1086 
1087   if (GetCurrentInstrumentationLevel() == requested_level) {
1088     // We're already set.
1089     return;
1090   }
1091 
1092   Thread* const self = Thread::Current();
1093   Runtime* runtime = Runtime::Current();
1094   Locks::mutator_lock_->AssertExclusiveHeld(self);
1095   Locks::thread_list_lock_->AssertNotHeld(self);
1096   // The following needs to happen in this order.
1097   // 1. Update the instrumentation level
1098   // 2. Switch the runtime to non-debuggable if requested. We switch to non-debuggable only when
1099   // the instrumentation level is set to kInstrumentNothing. So this needs to happen only after
1100   // updating the instrumentation level.
1101   // 3. Update the entry points. We use AOT code only if we aren't debuggable runtime. So update
1102   // entrypoints after switching the instrumentation level.
1103   UpdateInstrumentationLevel(requested_level);
1104   if (try_switch_to_non_debuggable) {
1105     MaybeSwitchRuntimeDebugState(self);
1106   }
1107   InstallStubsClassVisitor visitor(this);
1108   runtime->GetClassLinker()->VisitClasses(&visitor);
1109   if (requested_level > InstrumentationLevel::kInstrumentNothing) {
1110     InstrumentAllThreadStacks(/* force_deopt= */ false);
1111   } else {
1112     MaybeRestoreInstrumentationStack();
1113   }
1114 }
1115 
1116 static void ResetQuickAllocEntryPointsForThread(Thread* thread, [[maybe_unused]] void* arg) {
1117   thread->ResetQuickAllocEntryPointsForThread();
1118 }
1119 
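// Switches the quick allocation entry points between instrumented and uninstrumented versions,
// suspending all threads when the runtime has already started.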
1120 void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
1121   Thread* self = Thread::Current();
1122   Runtime* runtime = Runtime::Current();
1123   Locks::mutator_lock_->AssertNotHeld(self);
1124   Locks::instrument_entrypoints_lock_->AssertHeld(self);
1125   if (runtime->IsStarted()) {
1126     ScopedSuspendAll ssa(__FUNCTION__);
1127     MutexLock mu(self, *Locks::runtime_shutdown_lock_);
1128     SetQuickAllocEntryPointsInstrumented(instrumented);
1129     ResetQuickAllocEntryPoints();
1130     alloc_entrypoints_instrumented_ = instrumented;
1131   } else {
1132     MutexLock mu(self, *Locks::runtime_shutdown_lock_);
1133     SetQuickAllocEntryPointsInstrumented(instrumented);
1134 
1135     // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
1136     //       update for just this thread.
1137     // Note: self may be null. One of those paths is setting instrumentation in the Heap
1138     //       constructor for gcstress mode.
1139     if (self != nullptr) {
1140       ResetQuickAllocEntryPointsForThread(self, nullptr);
1141     }
1142 
1143     alloc_entrypoints_instrumented_ = instrumented;
1144   }
1145 }
1146 
1147 void Instrumentation::InstrumentQuickAllocEntryPoints() {
1148   MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
1149   InstrumentQuickAllocEntryPointsLocked();
1150 }
1151 
1152 void Instrumentation::UninstrumentQuickAllocEntryPoints() {
1153   MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
1154   UninstrumentQuickAllocEntryPointsLocked();
1155 }
1156 
1157 void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
1158   Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
1159   if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
1160     SetEntrypointsInstrumented(true);
1161   }
1162   ++quick_alloc_entry_points_instrumentation_counter_;
1163 }
1164 
1165 void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
1166   Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
1167   CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
1168   --quick_alloc_entry_points_instrumentation_counter_;
1169   if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
1170     SetEntrypointsInstrumented(false);
1171   }
1172 }
1173 
1174 void Instrumentation::ResetQuickAllocEntryPoints() {
1175   Runtime* runtime = Runtime::Current();
1176   if (runtime->IsStarted()) {
1177     MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
1178     runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
1179   }
1180 }
1181 
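// Returns a short human-readable name for `code` (interpreter, resolution, jit, nterp, oat, ...)
// for use in logs and debug output.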
1182 std::string Instrumentation::EntryPointString(const void* code) {
1183   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1184   jit::Jit* jit = Runtime::Current()->GetJit();
1185   if (class_linker->IsQuickToInterpreterBridge(code)) {
1186     return "interpreter";
1187   } else if (class_linker->IsQuickResolutionStub(code)) {
1188     return "resolution";
1189   } else if (jit != nullptr && jit->GetCodeCache()->ContainsPc(code)) {
1190     return "jit";
1191   } else if (code == GetInvokeObsoleteMethodStub()) {
1192     return "obsolete";
1193   } else if (code == interpreter::GetNterpEntryPoint()) {
1194     return "nterp";
1195   } else if (code == interpreter::GetNterpWithClinitEntryPoint()) {
1196     return "nterp with clinit";
1197   } else if (class_linker->IsQuickGenericJniStub(code)) {
1198     return "generic jni";
1199   } else if (Runtime::Current()->GetOatFileManager().ContainsPc(code)) {
1200     return "oat";
1201   } else if (OatQuickMethodHeader::IsStub(reinterpret_cast<const uint8_t*>(code)).value_or(false)) {
1202     return "stub";
1203   }
1204   return "unknown";
1205 }
1206 
void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* new_code) {
  if (!EntryExitStubsInstalled()) {
    // Fast path: no instrumentation.
    DCHECK(!IsDeoptimized(method));
    UpdateEntryPoints(method, new_code);
    return;
  }

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(new_code)) {
    // It's always OK to update to the interpreter.
    UpdateEntryPoints(method, new_code);
    return;
  }

  if (InterpretOnly(method)) {
    DCHECK(class_linker->IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode()))
        << EntryPointString(method->GetEntryPointFromQuickCompiledCode());
    // Don't update, stay deoptimized.
    return;
  }

  if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(new_code, method)) {
    DCHECK(CodeSupportsEntryExitHooks(method->GetEntryPointFromQuickCompiledCode(), method))
        << EntryPointString(method->GetEntryPointFromQuickCompiledCode()) << " "
        << method->PrettyMethod();
    // If we need entry / exit stubs but new_code doesn't support entry / exit hooks, just skip
    // the update.
    return;
  }

  // At this point, we can update as asked.
  UpdateEntryPoints(method, new_code);
}

void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* new_code) {
  // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
  // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
  // the ArtMethod is still in memory.
  if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(new_code, method)) {
    // If the new code doesn't support entry / exit hooks but we need them, don't update to the
    // new code.
    return;
  }
  UpdateEntryPoints(method, new_code);
}

void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* new_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  UpdateMethodsCodeImpl(method, new_code);
}

bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
  if (IsDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  deoptimized_methods_.insert(method);
  return true;
}

bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
  return deoptimized_methods_.find(method) != deoptimized_methods_.end();
}

bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
  auto it = deoptimized_methods_.find(method);
  if (it == deoptimized_methods_.end()) {
    return false;
  }
  deoptimized_methods_.erase(it);
  return true;
}

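// Deoptimize() reroutes a method's entrypoint to the quick-to-interpreter bridge so that it runs
// under the interpreter, and Undeoptimize() restores a suitable entrypoint once no client needs
// the method deoptimized anymore. Both expect the mutator lock to be held exclusively (see the
// asserts below).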
void Instrumentation::Deoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  {
    Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
        << " is already deoptimized";
  }

  if (method->IsObsolete()) {
    // If the method was marked as obsolete it should have `GetInvokeObsoleteMethodStub`
    // as its quick entry point.
    CHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), GetInvokeObsoleteMethodStub());
    return;
  }

  if (!InterpreterStubsInstalled()) {
    UpdateEntryPoints(method, GetQuickToInterpreterBridge());

    // Instrument thread stacks to request a check if the caller needs a deoptimization.
    // This isn't a strong deopt. We deopt this method if it is still in the deopt methods list.
    // If by the time we hit this frame we no longer need a deopt it is safe to continue.
    InstrumentAllThreadStacks(/* force_deopt= */ false);
  }
  CHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), GetQuickToInterpreterBridge());
}

void Instrumentation::Undeoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  {
    Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
        << " is not deoptimized";
  }

  // If interpreter stubs are still needed, there is nothing to do.
  if (InterpreterStubsInstalled()) {
    return;
  }

  if (method->IsObsolete()) {
    // Don't update entry points for obsolete methods. The entrypoint should
    // have been set to InvokeObsoleteMethodStub.
    DCHECK_EQ(method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize),
              GetInvokeObsoleteMethodStub());
    return;
  }

  // We are not using interpreter stubs for deoptimization. Restore the code of the method.
  // We still retain the interpreter bridge if we need it for other reasons.
  if (InterpretOnly(method)) {
    UpdateEntryPoints(method, GetQuickToInterpreterBridge());
  } else if (method->StillNeedsClinitCheck()) {
    UpdateEntryPoints(method, GetQuickResolutionStub());
  } else {
    UpdateEntryPoints(method, GetMaybeInstrumentedCodeForInvoke(method));
  }

  // If there is no deoptimized method left, we can restore the stack of each thread.
  if (!EntryExitStubsInstalled()) {
    MaybeRestoreInstrumentationStack();
  }
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

bool Instrumentation::IsDeoptimized(ArtMethod* method) {
  DCHECK(method != nullptr);
  return IsDeoptimizedMethod(method);
}

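// Tears down deoptimization support for the given client key: drops the instrumentation level
// back to kInstrumentNothing and then undeoptimizes every method left in deoptimized_methods_.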
void Instrumentation::DisableDeoptimization(const char* key, bool try_switch_to_non_debuggable) {
  // Remove any instrumentation support added for deoptimization.
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing, try_switch_to_non_debuggable);
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  // Undeoptimize selected methods.
  while (true) {
    ArtMethod* method;
    {
      if (deoptimized_methods_.empty()) {
        break;
      }
      method = *deoptimized_methods_.begin();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
}

void Instrumentation::MaybeSwitchRuntimeDebugState(Thread* self) {
  Runtime* runtime = Runtime::Current();
  // Return early if runtime is shutting down.
  if (runtime->IsShuttingDown(self)) {
    return;
  }

  // Don't switch the state if we started off as JavaDebuggable or if we still need entry / exit
  // hooks for other reasons.
  if (EntryExitStubsInstalled() || runtime->IsJavaDebuggableAtInit()) {
    return;
  }

  art::jit::Jit* jit = runtime->GetJit();
  if (jit != nullptr) {
    jit->GetCodeCache()->InvalidateAllCompiledCode();
    jit->GetJitCompiler()->SetDebuggableCompilerOption(false);
  }
  runtime->SetRuntimeDebugState(art::Runtime::RuntimeDebugState::kNonJavaDebuggable);
}

void Instrumentation::DeoptimizeEverything(const char* key) {
  // We want to switch to non-debuggable only when the debugger / profile tools are detaching.
  // This call is used for supporting debug related features (ex: single stepping across all
  // threads) while the debugger is still connected.
  ConfigureStubs(key,
                 InstrumentationLevel::kInstrumentWithInterpreter,
                 /*try_switch_to_non_debuggable=*/false);
}

void Instrumentation::UndeoptimizeEverything(const char* key) {
  CHECK(InterpreterStubsInstalled());
  // We want to switch to non-debuggable only when the debugger / profile tools are detaching.
  // This is used when we no longer need to run in interpreter. The debugger is still connected
  // so don't switch the runtime. We use "DisableDeoptimization" when detaching the debugger.
  ConfigureStubs(key,
                 InstrumentationLevel::kInstrumentNothing,
                 /*try_switch_to_non_debuggable=*/false);
}

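// Method tracing clients bracket their lifetime with the two calls below; the key identifies the
// client to ConfigureStubs(). A minimal sketch for a hypothetical tracer (listener registration
// not shown):
//
//   instr->EnableMethodTracing("my-tracer", listener, /*needs_interpreter=*/false);
//   ...   // entry/exit events are delivered to the registered listeners
//   instr->DisableMethodTracing("my-tracer");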
void Instrumentation::EnableMethodTracing(const char* key,
                                          InstrumentationListener* listener,
                                          bool needs_interpreter) {
  InstrumentationLevel level;
  if (needs_interpreter) {
    level = InstrumentationLevel::kInstrumentWithInterpreter;
  } else {
    level = InstrumentationLevel::kInstrumentWithEntryExitHooks;
  }
  // We are enabling method tracing here and need to stay in debuggable.
  ConfigureStubs(key, level, /*try_switch_to_non_debuggable=*/false);

  MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
  for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
    ReportMethodEntryForOnStackMethods(listener, thread);
  }
}

void Instrumentation::DisableMethodTracing(const char* key) {
  // We no longer need to be in debuggable runtime since we are stopping method tracing. If no
  // other debugger / profiling tools are active switch back to non-debuggable.
  ConfigureStubs(key,
                 InstrumentationLevel::kInstrumentNothing,
                 /*try_switch_to_non_debuggable=*/true);
}

const void* Instrumentation::GetCodeForInvoke(ArtMethod* method) {
  // This is called by the instrumentation and resolution trampolines, which should never be
  // getting proxy methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  // If the current entrypoint is neither the resolution stub nor the interpreter bridge, just
  // return it, assuming it's the most optimized code available.
  if (!class_linker->IsQuickResolutionStub(code) &&
      !class_linker->IsQuickToInterpreterBridge(code)) {
    return code;
  }

  if (InterpretOnly(method)) {
    // If we're forced into the interpreter just use it.
    return GetQuickToInterpreterBridge();
  }

  return GetOptimizedCodeFor(method);
}

const void* Instrumentation::GetMaybeInstrumentedCodeForInvoke(ArtMethod* method) {
  // This is called by resolution trampolines, which should never be getting proxy methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  const void* code = GetCodeForInvoke(method);
  if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(code, method)) {
    return method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge();
  }
  return code;
}

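// The *EventImpl helpers below fan an event out to every registered listener, skipping null
// slots. Method entry/exit events are kept in separate "slow" and "fast trace" listener lists,
// and both lists are visited here.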
void Instrumentation::MethodEnterEventImpl(Thread* thread, ArtMethod* method) const {
  DCHECK(!method->IsRuntimeMethod());
  if (HasMethodEntryListeners()) {
    for (InstrumentationListener* listener : method_entry_slow_listeners_) {
      if (listener != nullptr) {
        listener->MethodEntered(thread, method);
      }
    }
    for (InstrumentationListener* listener : method_entry_fast_trace_listeners_) {
      if (listener != nullptr) {
        listener->MethodEntered(thread, method);
      }
    }
  }
}

template <>
void Instrumentation::MethodExitEventImpl(Thread* thread,
                                          ArtMethod* method,
                                          OptionalFrame frame,
                                          MutableHandle<mirror::Object>& return_value) const {
  if (HasMethodExitListeners()) {
    for (InstrumentationListener* listener : method_exit_slow_listeners_) {
      if (listener != nullptr) {
        listener->MethodExited(thread, method, frame, return_value);
      }
    }
    for (InstrumentationListener* listener : method_exit_fast_trace_listeners_) {
      if (listener != nullptr) {
        listener->MethodExited(thread, method, frame, return_value);
      }
    }
  }
}

template<> void Instrumentation::MethodExitEventImpl(Thread* thread,
                                                     ArtMethod* method,
                                                     OptionalFrame frame,
                                                     JValue& return_value) const {
  if (HasMethodExitListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive() !=
        Primitive::kPrimNot) {
      for (InstrumentationListener* listener : method_exit_slow_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, method, frame, return_value);
        }
      }
      for (InstrumentationListener* listener : method_exit_fast_trace_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, method, frame, return_value);
        }
      }
    } else {
      MutableHandle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
      MethodExitEventImpl(thread, method, frame, ret);
      return_value.SetL(ret.Get());
    }
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread,
                                        ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (HasMethodUnwindListeners()) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      if (listener != nullptr) {
        listener->MethodUnwind(thread, method, dex_pc);
      }
    }
  }
}

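// For events that carry object references, the references are wrapped in handles before the
// listeners run, since a listener may suspend the thread and cause the objects to move.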
void Instrumentation::DexPcMovedEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : dex_pc_listeners_) {
    if (listener != nullptr) {
      listener->DexPcMoved(thread, thiz, method, dex_pc);
    }
  }
}

void Instrumentation::BranchImpl(Thread* thread,
                                 ArtMethod* method,
                                 uint32_t dex_pc,
                                 int32_t offset) const {
  for (InstrumentationListener* listener : branch_listeners_) {
    if (listener != nullptr) {
      listener->Branch(thread, method, dex_pc, offset);
    }
  }
}

void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
  for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
    if (listener != nullptr) {
      listener->WatchedFramePop(thread, frame);
    }
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread,
                                         ObjPtr<mirror::Object> this_object,
                                         ArtMethod* method,
                                         uint32_t dex_pc,
                                         ArtField* field) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : field_read_listeners_) {
    if (listener != nullptr) {
      listener->FieldRead(thread, thiz, method, dex_pc, field);
    }
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          ArtField* field,
                                          const JValue& field_value) const {
  Thread* self = Thread::Current();
  StackHandleScope<2> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  if (field->IsPrimitiveType()) {
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
      }
    }
  } else {
    Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
      }
    }
  }
}

void Instrumentation::ExceptionThrownEvent(Thread* thread,
                                           ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionThrownListeners()) {
    DCHECK_EQ(thread->GetException(), h_exception.Get());
    thread->ClearException();
    for (InstrumentationListener* listener : exception_thrown_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionThrown(thread, h_exception);
      }
    }
    // See b/65049545 for discussion about this behavior.
    thread->AssertNoPendingException();
    thread->SetException(h_exception.Get());
  }
}

void Instrumentation::ExceptionHandledEvent(Thread* thread,
                                            ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionHandledListeners()) {
    // We should have cleared the exception so that callers can detect a new one.
    DCHECK(thread->GetException() == nullptr);
    for (InstrumentationListener* listener : exception_handled_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionHandled(thread, h_exception);
      }
    }
  }
}

DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
  if (method->IsRuntimeMethod()) {
    // Certain methods have strict requirements on whether the dex instruction
    // should be re-executed upon deoptimization.
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForClinit)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForSuspendCheck)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
  }
  return DeoptimizationMethodType::kDefault;
}

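// Reconstructs the managed return value from the raw register contents handed in by the caller
// (typically the method exit hook): the return-type shorty selects between the floating point
// ('F'/'D') and core register results, and `is_ref` is set for reference returns ('L'/'[') so the
// caller can treat the value as a reference.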
JValue Instrumentation::GetReturnValue(ArtMethod* method,
                                       bool* is_ref,
                                       uint64_t* gpr_result,
                                       uint64_t* fpr_result) {
  uint32_t length;
  const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();

  // Runtime methods do not call into MethodExitEvent() so there should not be a suspension point
  // below.
  ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
  DCHECK(!method->IsRuntimeMethod());
  char return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];

  *is_ref = return_shorty == '[' || return_shorty == 'L';
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(*fpr_result);
  } else {
    return_value.SetJ(*gpr_result);
  }
  return return_value;
}

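// PushDeoptContextIfNeeded() only records the deoptimization context and sets the special
// deoptimization exception, leaving the actual deoptimization to the return path, whereas
// DeoptimizeIfNeeded() (below) performs the full-frame deoptimization immediately and returns
// the context for the long jump to the caller.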
bool Instrumentation::PushDeoptContextIfNeeded(Thread* self,
                                               DeoptimizationMethodType deopt_type,
                                               bool is_ref,
                                               const JValue& return_value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (self->IsExceptionPending()) {
    return false;
  }

  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
  DCHECK(sp != nullptr && (*sp)->IsRuntimeMethod());
  if (!ShouldDeoptimizeCaller(self, sp)) {
    return false;
  }

  // TODO(mythria): The current deopt behaviour is that we just re-execute the
  // alloc instruction, so we don't need the return value. For instrumentation
  // related deopts we don't actually need to re-execute and could use the result
  // we got here. Since this is a debug only feature it is not very important, but
  // consider reusing the result in the future.
  self->PushDeoptimizationContext(
      return_value, is_ref, nullptr, /* from_code= */ false, deopt_type);
  self->SetException(Thread::GetDeoptimizationException());
  return true;
}

std::unique_ptr<Context> Instrumentation::DeoptimizeIfNeeded(Thread* self,
                                                             ArtMethod** sp,
                                                             DeoptimizationMethodType type,
                                                             JValue return_value,
                                                             bool is_reference) {
  if (self->IsAsyncExceptionPending() || ShouldDeoptimizeCaller(self, sp)) {
    self->PushDeoptimizationContext(return_value,
                                    is_reference,
                                    nullptr,
                                    /* from_code= */ false,
                                    type);
    // This is requested from suspend points or when returning from runtime methods so exit
    // callbacks wouldn't be run yet. So don't skip method callbacks.
    return self->Deoptimize(DeoptimizationKind::kFullFrame,
                            /* single_frame= */ false,
                            /* skip_method_exit_callbacks= */ false);
  }
  // No exception or deoptimization.
  return nullptr;
}

bool Instrumentation::NeedsSlowInterpreterForMethod(Thread* self, ArtMethod* method) {
  return (method != nullptr) &&
         (InterpreterStubsInstalled() ||
          IsDeoptimized(method) ||
          self->IsForceInterpreter() ||
          // NB Since structurally obsolete compiled methods might have the offsets of
          // methods/fields compiled in we need to go back to interpreter whenever we hit
          // them.
          method->GetDeclaringClass()->IsObsoleteObject() ||
          Dbg::IsForcedInterpreterNeededForUpcall(self, method));
}

bool Instrumentation::ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp) {
  // When exit stubs aren't called we don't need to check for any instrumentation related
  // deoptimizations.
  if (!RunExitHooks()) {
    return false;
  }

  ArtMethod* runtime_method = *sp;
  DCHECK(runtime_method->IsRuntimeMethod());
  QuickMethodFrameInfo frame_info = Runtime::Current()->GetRuntimeMethodFrameInfo(runtime_method);
  return ShouldDeoptimizeCaller(self, sp, frame_info.FrameSizeInBytes());
}

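// With the runtime method's frame size known, the caller's frame can be located directly off the
// current quick frame: the caller ArtMethod* lives at sp + frame_size, and the caller's return PC
// is stored in the topmost slot of the runtime method's frame, at sp + frame_size - sizeof(void*).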
bool Instrumentation::ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp, size_t frame_size) {
  uintptr_t caller_sp = reinterpret_cast<uintptr_t>(sp) + frame_size;
  ArtMethod* caller = *(reinterpret_cast<ArtMethod**>(caller_sp));
  uintptr_t caller_pc_addr = reinterpret_cast<uintptr_t>(sp) + (frame_size - sizeof(void*));
  uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(caller_pc_addr);

  if (caller == nullptr ||
      caller->IsNative() ||
      caller->IsRuntimeMethod()) {
    // We need to check for a deoptimization here because when a redefinition happens it is
    // not safe to use any compiled code because the field offsets might change. For native
    // methods, we don't embed any field offsets so no need to check for a deoptimization.
    // If the caller is null we don't need to do anything. This can happen when the caller
    // is being interpreted by the switch interpreter (when called from
    // artQuickToInterpreterBridge) / during shutdown / early startup.
    return false;
  }

  bool needs_deopt = NeedsSlowInterpreterForMethod(self, caller);

  // Non java-debuggable apps don't support redefinition, so there is no need to check if the
  // frame needs to be deoptimized. Even in debuggable apps, we only need this check when a
  // redefinition has actually happened, which is indicated by the IsDeoptCheckRequired flag. We
  // also want to avoid getting the method header when we need a deopt anyway.
  if (Runtime::Current()->IsJavaDebuggable() && !needs_deopt && self->IsDeoptCheckRequired()) {
    const OatQuickMethodHeader* header = caller->GetOatQuickMethodHeader(caller_pc);
    if (header != nullptr && header->HasShouldDeoptimizeFlag()) {
      DCHECK(header->IsOptimized());
      uint8_t* should_deopt_flag_addr =
          reinterpret_cast<uint8_t*>(caller_sp) + header->GetShouldDeoptimizeFlagOffset();
      if ((*should_deopt_flag_addr &
           static_cast<uint8_t>(DeoptimizeFlagValue::kForceDeoptForRedefinition)) != 0) {
        needs_deopt = true;
      }
    }
  }

  if (needs_deopt) {
    if (!Runtime::Current()->IsAsyncDeoptimizeable(caller, caller_pc)) {
      LOG(WARNING) << "Got a deoptimization request on un-deoptimizable method "
                   << caller->PrettyMethod();
      return false;
    }
    return true;
  }

  return false;
}

}  // namespace instrumentation
}  // namespace art