/*
 * Copyright (C) 2024 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_QUICK_RUNTIME_ENTRYPOINTS_LIST_H_
#define ART_RUNTIME_ENTRYPOINTS_QUICK_RUNTIME_ENTRYPOINTS_LIST_H_


#include "entrypoints/entrypoint_utils.h"
#include "arch/instruction_set.h"
// <math.h> (not <cmath>) so that `fmod` and `fmodf`, which appear as rows in
// the entrypoint list below, are declared in the global namespace and match
// the extern "C" declarations generated at the bottom of this file.
#include <math.h>

namespace art {

// Forward declarations of managed-heap (mirror) types that appear in the
// entrypoint signatures below. Only pointers to these types are used here,
// so the full class definitions are not required.
namespace mirror {
class Array;
class Class;
template<class MirrorType> class CompressedReference;
class Object;
class String;
class Throwable;
template<class T> class PrimitiveArray;
using ByteArray = PrimitiveArray<int8_t>;
using CharArray = PrimitiveArray<uint16_t>;
}  // namespace mirror

// Runtime-internal types referenced by the entrypoint signatures.
class ArtMethod;
template<class MirrorType> class GcRoot;
template<class MirrorType> class StackReference;
class Thread;
class Context;
enum class DeoptimizationKind;

// All C++ quick entrypoints, i.e.: C++ entrypoint functions called from quick assembly code.
// Format is name, attribute, return type, argument types.
// X-macro list: every entrypoint is one V(name, attribute, return type,
// argument types...) row. Callers supply a V macro (e.g. ENTRYPOINT_ENUM at
// the bottom of this file) to stamp out declarations or tables for all rows.
// NOTE: comments inside the macro body must use /* */ form; a // comment
// before a continuation backslash would swallow the following line after
// line splicing.
#define RUNTIME_ENTRYPOINT_LIST(V) \
  /* Exception delivery and throw entrypoints. */ \
  V(artDeliverPendingExceptionFromCode, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    Thread* self) \
  V(artInvokeObsoleteMethod, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    ArtMethod* method, \
    Thread* self) \
  V(artDeliverExceptionFromCode, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    mirror::Throwable* exception, \
    Thread* self) \
  V(artThrowNullPointerExceptionFromCode, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    Thread* self) \
  V(artThrowNullPointerExceptionFromSignal, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    uintptr_t addr, \
    Thread* self) \
  V(artThrowDivZeroFromCode, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    Thread* self) \
  V(artThrowArrayBoundsFromCode, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    int index, \
    int length, \
    Thread* self) \
  V(artThrowStringBoundsFromCode, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    int index, \
    int length, \
    Thread* self) \
  V(artThrowStackOverflowFromCode, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    Thread* self) \
  V(artThrowClassCastExceptionForObject, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    mirror::Object* obj, \
    mirror::Class* dest_type, \
    Thread* self) \
  V(artThrowArrayStoreException, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    mirror::Object* array, \
    mirror::Object* value, \
    Thread* self) \
  /* Deoptimization checks, suspend checks and JIT compilation trigger. */ \
  V(artDeoptimizeIfNeeded, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    Thread* self, \
    uintptr_t result, \
    bool is_ref) \
  V(artTestSuspendFromCode, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    Thread* self) \
  V(artImplicitSuspendFromCode, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    Thread* self) \
  V(artCompileOptimized, REQUIRES_SHARED(Locks::mutator_lock_), void, \
    ArtMethod* method, \
    Thread* self) \
  /* Invocation bridges, trampolines and method entry/exit hooks. */ \
  V(artQuickToInterpreterBridge, REQUIRES_SHARED(Locks::mutator_lock_), uint64_t, \
    ArtMethod* method, \
    Thread* self, \
    ArtMethod** sp) \
  V(artQuickProxyInvokeHandler, REQUIRES_SHARED(Locks::mutator_lock_), uint64_t, \
    ArtMethod* proxy_method, \
    mirror::Object* receiver, \
    Thread* self, \
    ArtMethod** sp) \
  V(artQuickResolutionTrampoline, REQUIRES_SHARED(Locks::mutator_lock_), const void*, \
    ArtMethod* called, \
    mirror::Object* receiver, \
    Thread* self, \
    ArtMethod** sp) \
  V(artQuickGenericJniTrampoline, REQUIRES_SHARED(Locks::mutator_lock_) \
    NO_THREAD_SAFETY_ANALYSIS, const void*, \
    Thread* self, \
    ArtMethod** managed_sp, \
    uintptr_t* reserved_area) \
  V(artQuickGenericJniEndTrampoline, , uint64_t, \
    Thread* self, \
    jvalue result, \
    uint64_t result_fp) \
  V(artInvokeInterfaceTrampolineWithAccessCheck, REQUIRES_SHARED(Locks::mutator_lock_), \
    TwoWordReturn, \
    uint32_t method_idx, \
    mirror::Object* this_object, \
    Thread* self, \
    ArtMethod** sp) \
  V(artInvokeDirectTrampolineWithAccessCheck, REQUIRES_SHARED(Locks::mutator_lock_), \
    TwoWordReturn, \
    uint32_t method_idx, \
    mirror::Object* this_object, \
    Thread* self, \
    ArtMethod** sp) \
  V(artInvokeStaticTrampolineWithAccessCheck, REQUIRES_SHARED(Locks::mutator_lock_), \
    TwoWordReturn, \
    uint32_t method_idx, \
    [[maybe_unused]] mirror::Object* this_object, \
    Thread* self, \
    ArtMethod** sp) \
  V(artInvokeSuperTrampolineWithAccessCheck, REQUIRES_SHARED(Locks::mutator_lock_), \
    TwoWordReturn, \
    uint32_t method_idx, \
    mirror::Object* this_object, \
    Thread* self, \
    ArtMethod** sp) \
  V(artInvokeVirtualTrampolineWithAccessCheck, REQUIRES_SHARED(Locks::mutator_lock_), \
    TwoWordReturn, \
    uint32_t method_idx, \
    mirror::Object* this_object, \
    Thread* self, \
    ArtMethod** sp) \
  V(artInvokeInterfaceTrampoline, REQUIRES_SHARED(Locks::mutator_lock_), TwoWordReturn, \
    ArtMethod* interface_method, \
    mirror::Object* raw_this_object, \
    Thread* self, \
    ArtMethod** sp) \
  V(artInvokePolymorphic, REQUIRES_SHARED(Locks::mutator_lock_), uint64_t, \
    mirror::Object* raw_receiver, \
    Thread* self, \
    ArtMethod** sp) \
  V(artInvokePolymorphicWithHiddenReceiver, REQUIRES_SHARED(Locks::mutator_lock_), uint64_t, \
    mirror::Object* raw_receiver, \
    Thread* self, \
    ArtMethod** sp) \
  V(artInvokeCustom, REQUIRES_SHARED(Locks::mutator_lock_), uint64_t, \
    uint32_t call_site_idx, \
    Thread* self, \
    ArtMethod** sp) \
  V(artJniMethodEntryHook, REQUIRES_SHARED(Locks::mutator_lock_), void, \
    Thread* self) \
  V(artMethodEntryHook, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    ArtMethod* method, \
    Thread* self, \
    ArtMethod** sp) \
  V(artMethodExitHook, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    Thread* self, \
    ArtMethod** sp, \
    uint64_t* gpr_result, \
    uint64_t* fpr_result, \
    uint32_t frame_size) \
  /* Type checks. */ \
  V(artIsAssignableFromCode, REQUIRES_SHARED(Locks::mutator_lock_), size_t, \
    mirror::Class* klass, \
    mirror::Class* ref_class) \
  V(artInstanceOfFromCode, REQUIRES_SHARED(Locks::mutator_lock_), size_t, \
    mirror::Object* obj, \
    mirror::Class* ref_class) \
  /* Class initialization and class/method-handle/method-type/string resolution. */ \
  V(artInitializeStaticStorageFromCode, REQUIRES_SHARED(Locks::mutator_lock_), mirror::Class*, \
    mirror::Class* klass, \
    Thread* self) \
  V(artResolveTypeFromCode, REQUIRES_SHARED(Locks::mutator_lock_), mirror::Class*, \
    uint32_t type_idx, \
    Thread* self) \
  V(artResolveTypeAndVerifyAccessFromCode, REQUIRES_SHARED(Locks::mutator_lock_), mirror::Class*, \
    uint32_t type_idx, \
    Thread* self) \
  V(artResolveMethodHandleFromCode, REQUIRES_SHARED(Locks::mutator_lock_), mirror::MethodHandle*, \
    uint32_t method_handle_idx, \
    Thread* self) \
  V(artResolveMethodTypeFromCode, REQUIRES_SHARED(Locks::mutator_lock_), mirror::MethodType*, \
    uint32_t proto_idx, \
    Thread* self) \
  V(artResolveStringFromCode, REQUIRES_SHARED(Locks::mutator_lock_), mirror::String*, \
    int32_t string_idx, Thread* self) \
  /* Deoptimization entrypoints. */ \
  V(artDeoptimize, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    Thread* self, \
    bool skip_method_exit_callbacks) \
  V(artDeoptimizeFromCompiledCode, REQUIRES_SHARED(Locks::mutator_lock_), Context*, \
    DeoptimizationKind kind, \
    Thread* self) \
  /* fill-array-data support. */ \
  V(artHandleFillArrayDataFromCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    const Instruction::ArrayDataPayload* payload, \
    mirror::Array* array, \
    Thread* self) \
  /* JNI method transitions and JNI locking. */ \
  V(artJniReadBarrier, REQUIRES_SHARED(Locks::mutator_lock_) HOT_ATTR, void, \
    ArtMethod* method) \
  V(artJniMethodStart, UNLOCK_FUNCTION(Locks::mutator_lock_) HOT_ATTR, void, \
    Thread* self) \
  V(artJniUnlockObject, NO_THREAD_SAFETY_ANALYSIS REQUIRES(!Roles::uninterruptible_) \
    REQUIRES_SHARED(Locks::mutator_lock_) HOT_ATTR, void, \
    mirror::Object* locked, \
    Thread* self) \
  V(artJniMethodEnd, SHARED_LOCK_FUNCTION(Locks::mutator_lock_) HOT_ATTR, void, \
    Thread* self) \
  V(artJniMonitoredMethodStart, UNLOCK_FUNCTION(Locks::mutator_lock_), void, \
    Thread* self) \
  V(artJniMonitoredMethodEnd, SHARED_LOCK_FUNCTION(Locks::mutator_lock_), void, \
    Thread* self) \
  /* StringBuilder append. */ \
  V(artStringBuilderAppend, REQUIRES_SHARED(Locks::mutator_lock_) HOT_ATTR, mirror::String*, \
    uint32_t format, \
    const uint32_t* args, \
    Thread* self) \
  /* Long-jump context copying. */ \
  V(artContextCopyForLongJump, , void, \
    Context* context, \
    uintptr_t* gprs, \
    uintptr_t* fprs) \
  /* Allocation entrypoints: one full set per allocator, instrumented and not. */ \
  GENERATE_ENTRYPOINTS_DECL_FOR_ALLOCATOR(V, DlMalloc) \
  GENERATE_ENTRYPOINTS_DECL_FOR_ALLOCATOR(V, RosAlloc) \
  GENERATE_ENTRYPOINTS_DECL_FOR_ALLOCATOR(V, BumpPointer) \
  GENERATE_ENTRYPOINTS_DECL_FOR_ALLOCATOR(V, TLAB) \
  GENERATE_ENTRYPOINTS_DECL_FOR_ALLOCATOR(V, Region) \
  GENERATE_ENTRYPOINTS_DECL_FOR_ALLOCATOR(V, RegionTLAB) \
  /* Field accessors, one set per field kind, plus the narrow (8/16-bit) setters */ \
  /* that are not covered by the ART_GET_FIELD_FROM_CODE_DECL expansion. */ \
  ART_GET_FIELD_FROM_CODE_DECL(V, Byte, ssize_t, uint32_t) \
  ART_GET_FIELD_FROM_CODE_DECL(V, Boolean, size_t, uint32_t) \
  ART_GET_FIELD_FROM_CODE_DECL(V, Short, ssize_t, uint16_t) \
  ART_GET_FIELD_FROM_CODE_DECL(V, Char, size_t, uint16_t) \
  ART_GET_FIELD_FROM_CODE_DECL(V, 32, FIELD_RETURN_TYPE_32, uint32_t) \
  ART_GET_FIELD_FROM_CODE_DECL(V, 64, uint64_t, uint64_t) \
  ART_GET_FIELD_FROM_CODE_DECL(V, Obj, mirror::Object*, mirror::Object*) \
  V(artSet8StaticFromCompiledCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    uint32_t new_value, \
    Thread* self) \
  V(artSet16StaticFromCompiledCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    uint16_t new_value, \
    Thread* self) \
  V(artSet8InstanceFromCompiledCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    mirror::Object* obj, \
    uint8_t new_value, \
    Thread* self) \
  V(artSet16InstanceFromCompiledCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    mirror::Object* obj, \
    uint16_t new_value, \
    Thread* self) \
  V(artSet8StaticFromCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    uint32_t new_value, \
    ArtMethod* referrer, \
    Thread* self) \
  V(artSet16StaticFromCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    uint16_t new_value, \
    ArtMethod* referrer, \
    Thread* self) \
  V(artSet8InstanceFromCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    mirror::Object* obj, \
    uint8_t new_value, \
    ArtMethod* referrer, \
    Thread* self) \
  V(artSet16InstanceFromCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    mirror::Object* obj, \
    uint16_t new_value, \
    ArtMethod* referrer, \
    Thread* self) \
  V(artReadBarrierMark, REQUIRES_SHARED(Locks::mutator_lock_) HOT_ATTR, mirror::Object*, \
    mirror::Object* obj) \
  V(artReadBarrierSlow, REQUIRES_SHARED(Locks::mutator_lock_) HOT_ATTR, mirror::Object*, \
    mirror::Object* ref, \
    mirror::Object* obj, \
    uint32_t offset) \
  V(artReadBarrierForRootSlow, REQUIRES_SHARED(Locks::mutator_lock_) HOT_ATTR, mirror::Object*, \
    GcRoot<mirror::Object>* root) \
  /* Object locking. */ \
  V(artLockObjectFromCode, NO_THREAD_SAFETY_ANALYSIS REQUIRES(!Roles::uninterruptible_) \
    REQUIRES_SHARED(Locks::mutator_lock_), int, \
    mirror::Object* obj, \
    Thread* self) \
  V(artUnlockObjectFromCode, NO_THREAD_SAFETY_ANALYSIS REQUIRES(!Roles::uninterruptible_) \
    REQUIRES_SHARED(Locks::mutator_lock_), int, \
    mirror::Object* obj, \
    Thread* self) \
  /* Native method lookup. */ \
  V(artFindNativeMethodRunnable, REQUIRES_SHARED(Locks::mutator_lock_), const void*, \
    Thread* self) \
  V(artFindNativeMethod, , const void*, \
    Thread* self) \
  V(artCriticalNativeFrameSize, REQUIRES_SHARED(Locks::mutator_lock_), size_t, \
    ArtMethod* method, \
    uintptr_t caller_pc) \
  /* 64-bit integer arithmetic helpers; the empty attribute slot means no */ \
  /* lock annotations apply. */ \
  V(artLmul, , int64_t, \
    int64_t a, \
    int64_t b) \
  V(artLdiv, , int64_t, \
    int64_t a, \
    int64_t b) \
  V(artLmod, , int64_t, \
    int64_t a, \
    int64_t b) \
  /* Floating-point conversion and remainder helpers. fmodf/fmod must match */ \
  /* the <math.h> declarations included above. */ \
  V(art_l2d, , double, \
    int64_t l) \
  V(art_l2f, , float, \
    int64_t l) \
  V(art_d2l, , int64_t, \
    double d) \
  V(art_f2l, , int64_t, \
    float f) \
  V(art_d2i, , int32_t, \
    double d) \
  V(art_f2i, , int32_t, \
    float f) \
  V(fmodf, , float, \
    float, \
    float) \
  V(fmod, , double, \
    double, \
    double)

// Declarations from quick_alloc_entrypoints.cc.
// Expands V for the eight allocation entrypoints of one allocator /
// instrumentation combination: `suffix` names the allocator (e.g. TLAB) and
// `suffix2` is either "Instrumented" or empty.
#define GENERATE_ENTRYPOINTS_DECL_FOR_ALLOCATOR_INST(V, suffix, suffix2) \
  V(artAllocObjectFromCodeWithChecks##suffix##suffix2, REQUIRES_SHARED(Locks::mutator_lock_), \
    mirror::Object*, \
    mirror::Class* klass, \
    Thread* self) \
  V(artAllocObjectFromCodeResolved##suffix##suffix2, REQUIRES_SHARED(Locks::mutator_lock_), \
    mirror::Object*, \
    mirror::Class* klass, \
    Thread* self) \
  V(artAllocObjectFromCodeInitialized##suffix##suffix2, REQUIRES_SHARED(Locks::mutator_lock_), \
    mirror::Object*, \
    mirror::Class* klass, \
    Thread* self) \
  V(artAllocStringObject##suffix##suffix2, REQUIRES_SHARED(Locks::mutator_lock_), \
    mirror::String*, \
    mirror::Class* klass, \
    Thread* self) \
  V(artAllocArrayFromCodeResolved##suffix##suffix2, REQUIRES_SHARED(Locks::mutator_lock_), \
    mirror::Array*, \
    mirror::Class* klass, \
    int32_t component_count, \
    Thread* self) \
  V(artAllocStringFromBytesFromCode##suffix##suffix2, REQUIRES_SHARED(Locks::mutator_lock_), \
    mirror::String*, \
    mirror::ByteArray* byte_array, \
    int32_t high, \
    int32_t offset, \
    int32_t byte_count, \
    Thread* self) \
  V(artAllocStringFromCharsFromCode##suffix##suffix2, REQUIRES_SHARED(Locks::mutator_lock_), \
    mirror::String*, \
    int32_t offset, \
    int32_t char_count, \
    mirror::CharArray* char_array, \
    Thread* self) \
  V(artAllocStringFromStringFromCode##suffix##suffix2, REQUIRES_SHARED(Locks::mutator_lock_), \
    mirror::String*, \
    mirror::String* string, \
    Thread* self)

// Expands the allocation entrypoints for one allocator in both the
// instrumented and non-instrumented variants.
#define GENERATE_ENTRYPOINTS_DECL_FOR_ALLOCATOR(V, suffix) \
  GENERATE_ENTRYPOINTS_DECL_FOR_ALLOCATOR_INST(V, suffix, Instrumented) \
  GENERATE_ENTRYPOINTS_DECL_FOR_ALLOCATOR_INST(V, suffix, ) \

// Declarations from quick_field_entrypoints.cc.
// Expands V for the eight field-access entrypoints of one field kind:
// {Get,Set} x {Static,Instance} x {FromCode,FromCompiledCode}. RetType is the
// getter return type; SetType is the value type taken by the setters.
#define ART_GET_FIELD_FROM_CODE_DECL(V, Kind, RetType, SetType) \
  V(artGet ## Kind ## StaticFromCode, REQUIRES_SHARED(Locks::mutator_lock_), RetType, \
    uint32_t field_idx, \
    ArtMethod* referrer, \
    Thread* self) \
  V(artGet ## Kind ## InstanceFromCode, REQUIRES_SHARED(Locks::mutator_lock_), RetType, \
    uint32_t field_idx, \
    mirror::Object* obj, \
    ArtMethod* referrer, \
    Thread* self) \
  V(artSet ## Kind ## StaticFromCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    SetType new_value, \
    ArtMethod* referrer, \
    Thread* self) \
  V(artSet ## Kind ## InstanceFromCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    mirror::Object* obj, \
    SetType new_value, \
    ArtMethod* referrer, \
    Thread* self) \
  V(artGet ## Kind ## StaticFromCompiledCode, REQUIRES_SHARED(Locks::mutator_lock_), RetType, \
    uint32_t field_idx, \
    Thread* self) \
  V(artGet ## Kind ## InstanceFromCompiledCode, REQUIRES_SHARED(Locks::mutator_lock_), RetType, \
    uint32_t field_idx, \
    mirror::Object* obj, \
    Thread* self) \
  V(artSet ## Kind ## StaticFromCompiledCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    SetType new_value, \
    Thread* self) \
  V(artSet ## Kind ## InstanceFromCompiledCode, REQUIRES_SHARED(Locks::mutator_lock_), int, \
    uint32_t field_idx, \
    mirror::Object* obj, \
    SetType new_value, \
    Thread* self)

// Return type of the 32-bit field getters: uint32_t on riscv, size_t
// elsewhere (presumably to match each architecture's return-register
// convention — see quick_field_entrypoints.cc for the definitions).
#if defined(__riscv)
#define FIELD_RETURN_TYPE_32 uint32_t
#else
#define FIELD_RETURN_TYPE_32 size_t
#endif

// Define a macro that will extract information from RUNTIME_ENTRYPOINT_LIST to create a function
// declaration. extern "C" keeps the names unmangled so the quick assembly
// stubs can refer to them directly; `attr` carries the thread-safety
// annotations from each row.
#define ENTRYPOINT_ENUM(name, attr, rettype, ...) \
  extern "C" rettype name(__VA_ARGS__) attr;

// Declare all C++ quick entrypoints.
RUNTIME_ENTRYPOINT_LIST(ENTRYPOINT_ENUM)
#undef ENTRYPOINT_ENUM

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_QUICK_RUNTIME_ENTRYPOINTS_LIST_H_