/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_VERIFIER_REGISTER_LINE_H_
#define ART_RUNTIME_VERIFIER_REGISTER_LINE_H_

#include <limits>
#include <memory>
#include <vector>

#include <android-base/logging.h>

#include "base/arena_containers.h"
#include "base/locks.h"
#include "base/macros.h"
#include "base/safe_map.h"
#include "reg_type.h"

namespace art HIDDEN {

class Instruction;

namespace verifier {

class MethodVerifier;
class RegType;
class RegTypeCache;

/*
 * Register type categories, for type checking.
 *
 * The spec says category 1 includes boolean, byte, char, short, int, float, reference, and
 * returnAddress. Category 2 includes long and double.
 *
 * We treat object references separately, so we have "category1nr". We don't support jsr/ret, so
 * there is no "returnAddress" type.
 */
enum TypeCategory {
  kTypeCategoryUnknown = 0,
  kTypeCategory1nr = 1,   // boolean, byte, char, short, int, float
  kTypeCategory2 = 2,     // long, double
  kTypeCategoryRef = 3,   // object reference
};

// What to do with the lock levels when setting the register type.
enum class LockOp {
  kClear,  // Clear the lock levels recorded.
  kKeep    // Leave the lock levels alone.
};

// During verification, we associate one of these with every "interesting" instruction. We track
// the status of all registers, and (if the method has any monitor-enter instructions) maintain a
// stack of entered monitors (identified by code unit offset).
class RegisterLine {
 public:
  using RegisterStackMask = uint32_t;
  // A map from register to a bit vector of indices into the monitors_ stack.
  using RegToLockDepthsMap = ArenaSafeMap<uint32_t, RegisterStackMask>;

  // Maximum number of nested monitors to track before giving up and
  // taking the slow path.
  static constexpr size_t kMaxMonitorStackDepth =
      std::numeric_limits<RegisterStackMask>::digits;

  // Create a register line of num_regs registers.
  static RegisterLine* Create(size_t num_regs, ArenaAllocator& allocator, RegTypeCache* reg_types);

  // Implement category-1 "move" instructions. Copy a 32-bit value from "vsrc" to "vdst".
  void CopyRegister1(MethodVerifier* verifier, uint32_t vdst, uint32_t vsrc, TypeCategory cat)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Implement category-2 "move" instructions. Copy a 64-bit value from "vsrc" to "vdst". This
  // copies both halves of the register.
  void CopyRegister2(MethodVerifier* verifier, uint32_t vdst, uint32_t vsrc)
      REQUIRES_SHARED(Locks::mutator_lock_);
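  // Illustrative mapping from Dex "move" bytecodes to the copy helpers above. This is a sketch of
  // how a caller such as the method verifier is assumed to dispatch; the exact call sites are not
  // defined by this header.
  //   move vA, vB         ->  CopyRegister1(verifier, vA, vB, kTypeCategory1nr)
  //   move-object vA, vB  ->  CopyRegister1(verifier, vA, vB, kTypeCategoryRef)
  //   move-wide vA, vB    ->  CopyRegister2(verifier, vA, vB)  // copies both halves: vB and vB+1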
  // Implement "move-result". Copy the category-1 value from the result register to another
  // register, and reset the result register.
  void CopyResultRegister1(MethodVerifier* verifier, uint32_t vdst, bool is_reference)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Implement "move-result-wide". Copy the category-2 value from the result register to another
  // register, and reset the result register.
  void CopyResultRegister2(MethodVerifier* verifier, uint32_t vdst)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Set the invisible result register to unknown.
  void SetResultTypeToUnknown(RegTypeCache* reg_types) REQUIRES_SHARED(Locks::mutator_lock_);

  // Set the type of register N, verifying that the register is valid. If "new_type" is the "Lo"
  // part of a 64-bit value, register N+1 will be set to "new_type+1".
  // The register index was validated during the static pass, so we don't need to check it here.
  //
  // LockOp::kClear should be used by default; it will clear the lock levels associated with the
  // register. An example is setting the register type because an instruction writes to the
  // register.
  // LockOp::kKeep keeps the lock levels of the register and only changes the register type. This
  // is typical when the underlying value did not change, but we have "different" type information
  // available now. An example is sharpening types after a check-cast. Note that when given kKeep,
  // the new_type is dchecked to be a reference type.
  ALWAYS_INLINE void SetRegisterType(uint32_t vdst, RegType::Kind new_kind)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template <LockOp kLockOp>
  ALWAYS_INLINE void SetRegisterType(uint32_t vdst, const RegType& new_type)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetRegisterTypeWide(uint32_t vdst, RegType::Kind new_kind1, RegType::Kind new_kind2)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void SetRegisterTypeWide(uint32_t vdst, const RegType& new_type1, const RegType& new_type2)
      REQUIRES_SHARED(Locks::mutator_lock_);

  /* Set the type of the "result" register. */
  void SetResultRegisterType(MethodVerifier* verifier, const RegType& new_type)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResultRegisterTypeWide(const RegType& new_type1, const RegType& new_type2)
      REQUIRES_SHARED(Locks::mutator_lock_);

  /*
   * Set register type for a `new-instance` instruction.
   * For `new-instance`, we additionally record the allocation dex pc for vreg `vdst`.
   * This is used to keep track of registers that hold the same uninitialized reference,
   * so that we can update them all when a constructor is called on any of them.
   */
  void SetRegisterTypeForNewInstance(uint32_t vdst, const RegType& uninit_type, uint32_t dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_);
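  // A minimal sketch of the intended `new-instance` lifecycle, assuming a caller shaped like the
  // method verifier's instruction loop (the call sites shown are illustrative, not a contract):
  //   new-instance v0, LFoo;             ->  SetRegisterTypeForNewInstance(0, uninit_foo, dex_pc)
  //   move-object v1, v0                 ->  v1 now holds the same uninitialized reference
  //   invoke-direct {v0}, LFoo;-><init>  ->  MarkRefsAsInitialized(verifier, 0), declared below,
  //                                          which also updates v1 to the initialized LFoo; type.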
  // Get the id of the register type of register vsrc.
  uint16_t GetRegisterTypeId(uint32_t vsrc) const;

  // Get the type of register vsrc.
  const RegType& GetRegisterType(MethodVerifier* verifier, uint32_t vsrc) const;

  void CopyFromLine(const RegisterLine* src);

  std::string Dump(MethodVerifier* verifier) const REQUIRES_SHARED(Locks::mutator_lock_);

  void FillWithGarbage() {
    memset(&line_, 0xf1, num_regs_ * sizeof(uint16_t));
    monitors_.clear();
    reg_to_lock_depths_.clear();
  }

  /*
   * In debug mode, assert that the register line does not contain an uninitialized register
   * type for a `new-instance` allocation at a specific dex pc. We do this check before recording
   * the uninitialized register type and dex pc for a `new-instance` instruction.
   */
  void DCheckUniqueNewInstanceDexPc(MethodVerifier* verifier, uint32_t dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  /*
   * Update all registers holding the uninitialized type currently recorded for vreg `vsrc` to
   * instead hold the corresponding initialized reference type. This is called when an appropriate
   * constructor is invoked -- all copies of the reference must be marked as initialized.
   */
  void MarkRefsAsInitialized(MethodVerifier* verifier, uint32_t vsrc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  /*
   * Update all registers to be Conflict except vsrc.
   */
  void MarkAllRegistersAsConflicts(MethodVerifier* verifier);
  void MarkAllRegistersAsConflictsExcept(MethodVerifier* verifier, uint32_t vsrc);
  void MarkAllRegistersAsConflictsExceptWide(MethodVerifier* verifier, uint32_t vsrc);

  void SetThisInitialized() {
    this_initialized_ = true;
  }

  void CopyThisInitialized(const RegisterLine& src) {
    this_initialized_ = src.this_initialized_;
  }

  /*
   * Check constraints on constructor return. Specifically, make sure that the "this" argument got
   * initialized.
   * The "this" argument to <init> uses code offset kUninitThisArgAddr, which puts it at the start
   * of the list in slot 0. If we see a register with an uninitialized slot 0 reference, we know it
   * somehow didn't get initialized.
   */
  bool CheckConstructorReturn(MethodVerifier* verifier) const;

  // Compare two register lines. Returns 0 if they match.
  // Using this for a sort is unwise, since the value can change based on machine endianness.
  int CompareLine(const RegisterLine* line2) const {
    if (monitors_ != line2->monitors_) {
      return 1;
    }
    // TODO: DCHECK(reg_to_lock_depths_ == line2->reg_to_lock_depths_);
    return memcmp(&line_, &line2->line_, num_regs_ * sizeof(uint16_t));
  }

  size_t NumRegs() const {
    return num_regs_;
  }

  // Return how many bytes of memory a register line uses.
  ALWAYS_INLINE static size_t ComputeSize(size_t num_regs);

  // Verify/push monitor onto the monitor stack, locking the value in reg_idx at location insn_idx.
  void PushMonitor(MethodVerifier* verifier, uint32_t reg_idx, int32_t insn_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Verify/pop monitor from monitor stack, ensuring that we believe the monitor is locked.
  void PopMonitor(MethodVerifier* verifier, uint32_t reg_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);
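  // Sketch of the monitor-enter/exit nesting these helpers verify (assumed caller behavior; the
  // dispatch from bytecodes to PushMonitor/PopMonitor lives in the method verifier):
  //   monitor-enter v5  ->  PushMonitor(verifier, 5, pc1)   // stack: [pc1]
  //   monitor-enter v6  ->  PushMonitor(verifier, 6, pc2)   // stack: [pc1, pc2]
  //   monitor-exit v6   ->  PopMonitor(verifier, 6)         // stack: [pc1]
  //   monitor-exit v5   ->  PopMonitor(verifier, 5)         // stack: []
  // Exiting v5 before v6 would be reported, since v6 still refers to the top of the stack.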
  // Stack of currently held monitors and where they were locked.
  size_t MonitorStackDepth() const {
    return monitors_.size();
  }

  // We expect no monitors to be held at certain points, such as when a method returns. Verify that
  // the stack is empty, queueing a LOCKING error otherwise.
  void VerifyMonitorStackEmpty(MethodVerifier* verifier) const;

  bool MergeRegisters(MethodVerifier* verifier, const RegisterLine* incoming_line)
      REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetMonitorEnterCount() const {
    return monitors_.size();
  }

  uint32_t GetMonitorEnterDexPc(size_t i) const {
    return monitors_[i];
  }

  // We give access to the lock depth map to avoid an expensive poll loop for FindLocksAtDexPC.
  template <typename T>
  void IterateRegToLockDepths(T fn) const {
    for (const auto& pair : reg_to_lock_depths_) {
      const uint32_t reg = pair.first;
      uint32_t depths = pair.second;
      uint32_t depth = 0;
      while (depths != 0) {
        if ((depths & 1) != 0) {
          fn(reg, depth);
        }
        depths >>= 1;
        depth++;
      }
    }
  }

 private:
  // For uninitialized types we need to check for allocation dex pc mismatch when merging.
  // This does not apply to uninitialized "this" reference types.
  static bool NeedsAllocationDexPc(const RegType& reg_type);

  void EnsureAllocationDexPcsAvailable();

  template <LockOp kLockOp>
  ALWAYS_INLINE void SetRegisterTypeImpl(uint32_t vdst, uint16_t new_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void SetRegisterTypeWideImpl(uint32_t vdst, uint16_t new_id1, uint16_t new_id2)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void CopyRegToLockDepth(size_t dst, size_t src) {
    auto it = reg_to_lock_depths_.find(src);
    if (it != reg_to_lock_depths_.end()) {
      reg_to_lock_depths_.Put(dst, it->second);
    }
  }

  bool IsSetLockDepth(size_t reg, size_t depth) {
    auto it = reg_to_lock_depths_.find(reg);
    if (it != reg_to_lock_depths_.end()) {
      return (it->second & (1 << depth)) != 0;
    } else {
      return false;
    }
  }

  bool SetRegToLockDepth(size_t reg, size_t depth) {
    CHECK_LT(depth, kMaxMonitorStackDepth);
    if (IsSetLockDepth(reg, depth)) {
      return false;  // Register already holds lock so locking twice is erroneous.
    }
    auto it = reg_to_lock_depths_.find(reg);
    if (it == reg_to_lock_depths_.end()) {
      reg_to_lock_depths_.Put(reg, 1 << depth);
    } else {
      it->second |= (1 << depth);
    }
    return true;
  }
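  // A minimal sketch of the bit encoding used by the helpers above (illustration only): each
  // register maps to a RegisterStackMask whose bit i is set iff the register holds the lock
  // pushed at monitors_ depth i. For example, if v5 was locked at stack depths 0 and 2:
  //   SetRegToLockDepth(5, 0);       // mask for v5 becomes 0b001
  //   SetRegToLockDepth(5, 2);       // mask for v5 becomes 0b101
  //   IsSetLockDepth(5, 1);          // false
  //   IterateRegToLockDepths(fn);    // calls fn(5, 0) and fn(5, 2)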
  void ClearRegToLockDepth(size_t reg, size_t depth);

  void ClearAllRegToLockDepths(size_t reg) {
    reg_to_lock_depths_.erase(reg);
  }

  RegisterLine(size_t num_regs, ArenaAllocator& allocator, RegTypeCache* reg_types);

  static constexpr uint32_t kNoDexPc = static_cast<uint32_t>(-1);

  // Number of registers tracked by this line (the logical length of line_).
  const uint32_t num_regs_;

  // Storage for the result register's type, valid after an invocation.
  uint16_t result_[2];

  // Track allocation dex pcs for `new-instance` results moved to other registers.
  uint32_t* allocation_dex_pcs_;

  // A stack of monitor enter locations.
  ArenaVector<uint32_t> monitors_;

  // A map from register to a bit vector of indices into the monitors_ stack. As we pop the monitor
  // stack we verify that monitor-enter/exit are correctly nested. That is, if there was a
  // monitor-enter on v5 and then on v6, we expect the monitor-exit to be on v6 then on v5.
  RegToLockDepthsMap reg_to_lock_depths_;

  // Whether "this" initialization (a constructor supercall) has happened.
  bool this_initialized_;

  // An array of RegType Ids associated with each dex register.
  uint16_t line_[1];

  friend class RegisterLineArenaDelete;

  DISALLOW_COPY_AND_ASSIGN(RegisterLine);
};

class RegisterLineArenaDelete : public ArenaDelete<RegisterLine> {
 public:
  void operator()(RegisterLine* ptr) const;
};

using RegisterLineArenaUniquePtr = std::unique_ptr<RegisterLine, RegisterLineArenaDelete>;

}  // namespace verifier
}  // namespace art

#endif  // ART_RUNTIME_VERIFIER_REGISTER_LINE_H_