Vladimir Marko | 2b5eaa2 | 2013-12-13 13:59:30 +0000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2013 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "base/stl_util.h" |
| 18 | #include "dex_file.h" |
| 19 | #include "dex_instruction.h" |
| 20 | #include "dex_instruction-inl.h" |
| 21 | #include "base/mutex.h" |
| 22 | #include "base/mutex-inl.h" |
| 23 | #include "mirror/art_method.h" |
| 24 | #include "mirror/art_method-inl.h" |
| 25 | #include "mirror/class.h" |
| 26 | #include "mirror/class-inl.h" |
| 27 | #include "mirror/dex_cache.h" |
| 28 | #include "mirror/dex_cache-inl.h" |
| 29 | #include "mirror/object.h" |
| 30 | #include "mirror/object-inl.h" |
| 31 | #include "verified_methods_data.h" |
| 32 | #include "verifier/dex_gc_map.h" |
| 33 | #include "verifier/method_verifier.h" |
| 34 | #include "verifier/method_verifier-inl.h" |
| 35 | #include "verifier/register_line.h" |
| 36 | #include "verifier/register_line-inl.h" |
| 37 | |
| 38 | namespace art { |
| 39 | |
// Constructs the shared verification-results store. Each table gets its own
// reader-writer lock so that concurrent compiler threads contend only on the
// table they actually touch.
VerifiedMethodsData::VerifiedMethodsData()
    : dex_gc_maps_lock_("compiler GC maps lock"),
      dex_gc_maps_(),
      safecast_map_lock_("compiler Cast Elision lock"),
      safecast_map_(),
      devirt_maps_lock_("compiler Devirtualization lock"),
      devirt_maps_(),
      rejected_classes_lock_("compiler rejected classes lock"),
      rejected_classes_() {
}
| 50 | |
| 51 | VerifiedMethodsData::~VerifiedMethodsData() { |
| 52 | Thread* self = Thread::Current(); |
| 53 | { |
| 54 | WriterMutexLock mu(self, dex_gc_maps_lock_); |
| 55 | STLDeleteValues(&dex_gc_maps_); |
| 56 | } |
| 57 | { |
| 58 | WriterMutexLock mu(self, safecast_map_lock_); |
| 59 | STLDeleteValues(&safecast_map_); |
| 60 | } |
| 61 | { |
| 62 | WriterMutexLock mu(self, devirt_maps_lock_); |
| 63 | STLDeleteValues(&devirt_maps_); |
| 64 | } |
| 65 | } |
| 66 | |
| 67 | bool VerifiedMethodsData::ProcessVerifiedMethod(verifier::MethodVerifier* method_verifier) { |
| 68 | MethodReference ref = method_verifier->GetMethodReference(); |
| 69 | bool compile = IsCandidateForCompilation(ref, method_verifier->GetAccessFlags()); |
| 70 | if (compile) { |
| 71 | /* Generate a register map and add it to the method. */ |
| 72 | const std::vector<uint8_t>* dex_gc_map = GenerateGcMap(method_verifier); |
| 73 | if (dex_gc_map == NULL) { |
| 74 | DCHECK(method_verifier->HasFailures()); |
| 75 | return false; // Not a real failure, but a failure to encode |
| 76 | } |
| 77 | if (kIsDebugBuild) { |
| 78 | VerifyGcMap(method_verifier, *dex_gc_map); |
| 79 | } |
| 80 | SetDexGcMap(ref, dex_gc_map); |
Sebastien Hertz | 5dce0c9 | 2013-12-18 11:01:49 +0100 | [diff] [blame] | 81 | |
| 82 | // TODO: move this out when DEX-to-DEX supports devirtualization. |
| 83 | if (method_verifier->HasVirtualOrInterfaceInvokes()) { |
| 84 | PcToConcreteMethodMap* pc_to_concrete_method = GenerateDevirtMap(method_verifier); |
| 85 | if (pc_to_concrete_method != NULL) { |
| 86 | SetDevirtMap(ref, pc_to_concrete_method); |
| 87 | } |
| 88 | } |
Vladimir Marko | 2b5eaa2 | 2013-12-13 13:59:30 +0000 | [diff] [blame] | 89 | } |
| 90 | |
| 91 | if (method_verifier->HasCheckCasts()) { |
| 92 | MethodSafeCastSet* method_to_safe_casts = GenerateSafeCastSet(method_verifier); |
| 93 | if (method_to_safe_casts != NULL) { |
| 94 | SetSafeCastMap(ref, method_to_safe_casts); |
| 95 | } |
| 96 | } |
Vladimir Marko | 2b5eaa2 | 2013-12-13 13:59:30 +0000 | [diff] [blame] | 97 | return true; |
| 98 | } |
| 99 | |
| 100 | const std::vector<uint8_t>* VerifiedMethodsData::GetDexGcMap(MethodReference ref) { |
| 101 | ReaderMutexLock mu(Thread::Current(), dex_gc_maps_lock_); |
| 102 | DexGcMapTable::const_iterator it = dex_gc_maps_.find(ref); |
| 103 | CHECK(it != dex_gc_maps_.end()) |
| 104 | << "Didn't find GC map for: " << PrettyMethod(ref.dex_method_index, *ref.dex_file); |
| 105 | CHECK(it->second != NULL); |
| 106 | return it->second; |
| 107 | } |
| 108 | |
| 109 | const MethodReference* VerifiedMethodsData::GetDevirtMap(const MethodReference& ref, |
| 110 | uint32_t dex_pc) { |
| 111 | ReaderMutexLock mu(Thread::Current(), devirt_maps_lock_); |
| 112 | DevirtualizationMapTable::const_iterator it = devirt_maps_.find(ref); |
| 113 | if (it == devirt_maps_.end()) { |
| 114 | return NULL; |
| 115 | } |
| 116 | |
| 117 | // Look up the PC in the map, get the concrete method to execute and return its reference. |
| 118 | PcToConcreteMethodMap::const_iterator pc_to_concrete_method = it->second->find(dex_pc); |
| 119 | if (pc_to_concrete_method != it->second->end()) { |
| 120 | return &(pc_to_concrete_method->second); |
| 121 | } else { |
| 122 | return NULL; |
| 123 | } |
| 124 | } |
| 125 | |
| 126 | bool VerifiedMethodsData::IsSafeCast(MethodReference ref, uint32_t pc) { |
| 127 | ReaderMutexLock mu(Thread::Current(), safecast_map_lock_); |
| 128 | SafeCastMap::const_iterator it = safecast_map_.find(ref); |
| 129 | if (it == safecast_map_.end()) { |
| 130 | return false; |
| 131 | } |
| 132 | |
| 133 | // Look up the cast address in the set of safe casts |
Vladimir Marko | a9faa70 | 2013-12-17 11:17:52 +0000 | [diff] [blame] | 134 | // Use binary_search for lookup in the sorted vector. |
| 135 | return std::binary_search(it->second->begin(), it->second->end(), pc); |
Vladimir Marko | 2b5eaa2 | 2013-12-13 13:59:30 +0000 | [diff] [blame] | 136 | } |
| 137 | |
| 138 | void VerifiedMethodsData::AddRejectedClass(ClassReference ref) { |
| 139 | { |
| 140 | WriterMutexLock mu(Thread::Current(), rejected_classes_lock_); |
| 141 | rejected_classes_.insert(ref); |
| 142 | } |
| 143 | DCHECK(IsClassRejected(ref)); |
| 144 | } |
| 145 | |
| 146 | bool VerifiedMethodsData::IsClassRejected(ClassReference ref) { |
| 147 | ReaderMutexLock mu(Thread::Current(), rejected_classes_lock_); |
| 148 | return (rejected_classes_.find(ref) != rejected_classes_.end()); |
| 149 | } |
| 150 | |
| 151 | bool VerifiedMethodsData::IsCandidateForCompilation(MethodReference& method_ref, |
| 152 | const uint32_t access_flags) { |
| 153 | #ifdef ART_SEA_IR_MODE |
| 154 | bool use_sea = Runtime::Current()->IsSeaIRMode(); |
| 155 | use_sea = use_sea && (std::string::npos != PrettyMethod( |
| 156 | method_ref.dex_method_index, *(method_ref.dex_file)).find("fibonacci")); |
| 157 | if (use_sea) return true; |
| 158 | #endif |
| 159 | // Don't compile class initializers, ever. |
| 160 | if (((access_flags & kAccConstructor) != 0) && ((access_flags & kAccStatic) != 0)) { |
| 161 | return false; |
| 162 | } |
| 163 | return (Runtime::Current()->GetCompilerFilter() != Runtime::kInterpretOnly); |
| 164 | } |
| 165 | |
| 166 | const std::vector<uint8_t>* VerifiedMethodsData::GenerateGcMap( |
| 167 | verifier::MethodVerifier* method_verifier) { |
| 168 | size_t num_entries, ref_bitmap_bits, pc_bits; |
| 169 | ComputeGcMapSizes(method_verifier, &num_entries, &ref_bitmap_bits, &pc_bits); |
| 170 | // There's a single byte to encode the size of each bitmap |
| 171 | if (ref_bitmap_bits >= (8 /* bits per byte */ * 8192 /* 13-bit size */ )) { |
| 172 | // TODO: either a better GC map format or per method failures |
| 173 | method_verifier->Fail(verifier::VERIFY_ERROR_BAD_CLASS_HARD) |
| 174 | << "Cannot encode GC map for method with " << ref_bitmap_bits << " registers"; |
| 175 | return NULL; |
| 176 | } |
| 177 | size_t ref_bitmap_bytes = (ref_bitmap_bits + 7) / 8; |
| 178 | // There are 2 bytes to encode the number of entries |
| 179 | if (num_entries >= 65536) { |
| 180 | // TODO: either a better GC map format or per method failures |
| 181 | method_verifier->Fail(verifier::VERIFY_ERROR_BAD_CLASS_HARD) |
| 182 | << "Cannot encode GC map for method with " << num_entries << " entries"; |
| 183 | return NULL; |
| 184 | } |
| 185 | size_t pc_bytes; |
| 186 | verifier::RegisterMapFormat format; |
| 187 | if (pc_bits <= 8) { |
| 188 | format = verifier::kRegMapFormatCompact8; |
| 189 | pc_bytes = 1; |
| 190 | } else if (pc_bits <= 16) { |
| 191 | format = verifier::kRegMapFormatCompact16; |
| 192 | pc_bytes = 2; |
| 193 | } else { |
| 194 | // TODO: either a better GC map format or per method failures |
| 195 | method_verifier->Fail(verifier::VERIFY_ERROR_BAD_CLASS_HARD) |
| 196 | << "Cannot encode GC map for method with " |
| 197 | << (1 << pc_bits) << " instructions (number is rounded up to nearest power of 2)"; |
| 198 | return NULL; |
| 199 | } |
| 200 | size_t table_size = ((pc_bytes + ref_bitmap_bytes) * num_entries) + 4; |
| 201 | std::vector<uint8_t>* table = new std::vector<uint8_t>; |
| 202 | if (table == NULL) { |
| 203 | method_verifier->Fail(verifier::VERIFY_ERROR_BAD_CLASS_HARD) |
| 204 | << "Failed to encode GC map (size=" << table_size << ")"; |
| 205 | return NULL; |
| 206 | } |
| 207 | table->reserve(table_size); |
| 208 | // Write table header |
| 209 | table->push_back(format | ((ref_bitmap_bytes & ~0xFF) >> 5)); |
| 210 | table->push_back(ref_bitmap_bytes & 0xFF); |
| 211 | table->push_back(num_entries & 0xFF); |
| 212 | table->push_back((num_entries >> 8) & 0xFF); |
| 213 | // Write table data |
| 214 | const DexFile::CodeItem* code_item = method_verifier->CodeItem(); |
| 215 | for (size_t i = 0; i < code_item->insns_size_in_code_units_; i++) { |
| 216 | if (method_verifier->GetInstructionFlags(i).IsCompileTimeInfoPoint()) { |
| 217 | table->push_back(i & 0xFF); |
| 218 | if (pc_bytes == 2) { |
| 219 | table->push_back((i >> 8) & 0xFF); |
| 220 | } |
| 221 | verifier::RegisterLine* line = method_verifier->GetRegLine(i); |
| 222 | line->WriteReferenceBitMap(*table, ref_bitmap_bytes); |
| 223 | } |
| 224 | } |
| 225 | DCHECK_EQ(table->size(), table_size); |
| 226 | return table; |
| 227 | } |
| 228 | |
// Debug-build cross-check of an encoded GC map against the verifier's register
// lines: every compile-time info point must have exactly one table entry (in
// pc order), non-info points must have none, and each entry's bitmap must mark
// exactly the registers that hold references at that pc.
void VerifiedMethodsData::VerifyGcMap(verifier::MethodVerifier* method_verifier,
                                      const std::vector<uint8_t>& data) {
  // Check that for every GC point there is a map entry, there aren't entries for non-GC points,
  // that the table data is well formed and all references are marked (or not) in the bitmap
  verifier::DexPcToReferenceMap map(&data[0]);
  DCHECK_EQ(data.size(), map.RawSize());
  size_t map_index = 0;
  const DexFile::CodeItem* code_item = method_verifier->CodeItem();
  for (size_t i = 0; i < code_item->insns_size_in_code_units_; i++) {
    const uint8_t* reg_bitmap = map.FindBitMap(i, false);
    if (method_verifier->GetInstructionFlags(i).IsCompileTimeInfoPoint()) {
      // Entries must appear one per info point, in ascending dex pc order.
      CHECK_LT(map_index, map.NumEntries());
      CHECK_EQ(map.GetDexPc(map_index), i);
      CHECK_EQ(map.GetBitMap(map_index), reg_bitmap);
      map_index++;
      verifier::RegisterLine* line = method_verifier->GetRegLine(i);
      for (size_t j = 0; j < code_item->registers_size_; j++) {
        if (line->GetRegisterType(j).IsNonZeroReferenceTypes()) {
          // A reference-holding register must be covered by the bitmap and set.
          CHECK_LT(j / 8, map.RegWidth());
          CHECK_EQ((reg_bitmap[j / 8] >> (j % 8)) & 1, 1);
        } else if ((j / 8) < map.RegWidth()) {
          // A non-reference register that is covered must be clear.
          CHECK_EQ((reg_bitmap[j / 8] >> (j % 8)) & 1, 0);
        } else {
          // If a register doesn't contain a reference then the bitmap may be shorter than the line
        }
      }
    } else {
      // Non-info points must not have a bitmap entry.
      CHECK(reg_bitmap == NULL);
    }
  }
}
| 260 | |
| 261 | void VerifiedMethodsData::ComputeGcMapSizes(verifier::MethodVerifier* method_verifier, |
| 262 | size_t* gc_points, size_t* ref_bitmap_bits, |
| 263 | size_t* log2_max_gc_pc) { |
| 264 | size_t local_gc_points = 0; |
| 265 | size_t max_insn = 0; |
| 266 | size_t max_ref_reg = -1; |
| 267 | const DexFile::CodeItem* code_item = method_verifier->CodeItem(); |
| 268 | for (size_t i = 0; i < code_item->insns_size_in_code_units_; i++) { |
| 269 | if (method_verifier->GetInstructionFlags(i).IsCompileTimeInfoPoint()) { |
| 270 | local_gc_points++; |
| 271 | max_insn = i; |
| 272 | verifier::RegisterLine* line = method_verifier->GetRegLine(i); |
| 273 | max_ref_reg = line->GetMaxNonZeroReferenceReg(max_ref_reg); |
| 274 | } |
| 275 | } |
| 276 | *gc_points = local_gc_points; |
| 277 | *ref_bitmap_bits = max_ref_reg + 1; // if max register is 0 we need 1 bit to encode (ie +1) |
| 278 | size_t i = 0; |
| 279 | while ((1U << i) <= max_insn) { |
| 280 | i++; |
| 281 | } |
| 282 | *log2_max_gc_pc = i; |
| 283 | } |
| 284 | |
| 285 | void VerifiedMethodsData::SetDexGcMap(MethodReference ref, const std::vector<uint8_t>* gc_map) { |
| 286 | DCHECK(Runtime::Current()->IsCompiler()); |
| 287 | { |
| 288 | WriterMutexLock mu(Thread::Current(), dex_gc_maps_lock_); |
| 289 | DexGcMapTable::iterator it = dex_gc_maps_.find(ref); |
| 290 | if (it != dex_gc_maps_.end()) { |
| 291 | delete it->second; |
| 292 | dex_gc_maps_.erase(it); |
| 293 | } |
| 294 | dex_gc_maps_.Put(ref, gc_map); |
| 295 | } |
| 296 | DCHECK(GetDexGcMap(ref) != NULL); |
| 297 | } |
| 298 | |
// Collects the dex pcs of check-cast and aput-object instructions whose
// type check is statically guaranteed to succeed, so the code generator can
// elide the runtime check. Returns a caller-owned sorted vector of dex pcs,
// or NULL when there are none or when verification recorded failures.
VerifiedMethodsData::MethodSafeCastSet* VerifiedMethodsData::GenerateSafeCastSet(
    verifier::MethodVerifier* method_verifier) {
  /*
   * Walks over the method code and adds any cast instructions in which
   * the type cast is implicit to a set, which is used in the code generation
   * to elide these casts.
   */
  // With failures the recorded register types are unreliable; record nothing.
  if (method_verifier->HasFailures()) {
    return NULL;
  }
  // Allocated lazily on the first safe cast found; stays NULL otherwise.
  UniquePtr<MethodSafeCastSet> mscs;
  const DexFile::CodeItem* code_item = method_verifier->CodeItem();
  const Instruction* inst = Instruction::At(code_item->insns_);
  const Instruction* end = Instruction::At(code_item->insns_ +
                                           code_item->insns_size_in_code_units_);

  for (; inst < end; inst = inst->Next()) {
    Instruction::Code code = inst->Opcode();
    if ((code == Instruction::CHECK_CAST) || (code == Instruction::APUT_OBJECT)) {
      uint32_t dex_pc = inst->GetDexPc(code_item->insns_);
      const verifier::RegisterLine* line = method_verifier->GetRegLine(dex_pc);
      bool is_safe_cast = false;
      if (code == Instruction::CHECK_CAST) {
        // check-cast is safe when the value's verified type is already
        // assignable to the cast target type.
        const verifier::RegType& reg_type(line->GetRegisterType(inst->VRegA_21c()));
        const verifier::RegType& cast_type =
            method_verifier->ResolveCheckedClass(inst->VRegB_21c());
        is_safe_cast = cast_type.IsStrictlyAssignableFrom(reg_type);
      } else {
        const verifier::RegType& array_type(line->GetRegisterType(inst->VRegB_23x()));
        // We only know its safe to assign to an array if the array type is precise. For example,
        // an Object[] can have any type of object stored in it, but it may also be assigned a
        // String[] in which case the stores need to be of Strings.
        if (array_type.IsPreciseReference()) {
          const verifier::RegType& value_type(line->GetRegisterType(inst->VRegA_23x()));
          const verifier::RegType& component_type = method_verifier->GetRegTypeCache()
              ->GetComponentType(array_type, method_verifier->GetClassLoader());
          is_safe_cast = component_type.IsStrictlyAssignableFrom(value_type);
        }
      }
      if (is_safe_cast) {
        if (mscs.get() == nullptr) {
          mscs.reset(new MethodSafeCastSet());
        } else {
          // The forward instruction walk yields strictly increasing pcs.
          DCHECK_LT(mscs->back(), dex_pc);  // Verify ordering for push_back() to the sorted vector.
        }
        mscs->push_back(dex_pc);
      }
    }
  }
  return mscs.release();
}
| 350 | |
| 351 | void VerifiedMethodsData::SetSafeCastMap(MethodReference ref, const MethodSafeCastSet* cast_set) { |
| 352 | WriterMutexLock mu(Thread::Current(), safecast_map_lock_); |
| 353 | SafeCastMap::iterator it = safecast_map_.find(ref); |
| 354 | if (it != safecast_map_.end()) { |
| 355 | delete it->second; |
| 356 | safecast_map_.erase(it); |
| 357 | } |
| 358 | safecast_map_.Put(ref, cast_set); |
| 359 | DCHECK(safecast_map_.find(ref) != safecast_map_.end()); |
| 360 | } |
| 361 | |
// Builds a map from the dex pcs of invoke-virtual/invoke-interface call sites
// to the concrete method each one provably dispatches to, enabling the
// compiler to devirtualize those calls. Returns a caller-owned map, or NULL
// when nothing could be devirtualized or verification recorded failures.
VerifiedMethodsData::PcToConcreteMethodMap* VerifiedMethodsData::GenerateDevirtMap(
    verifier::MethodVerifier* method_verifier) {
  // It is risky to rely on reg_types for sharpening in cases of soft
  // verification, we might end up sharpening to a wrong implementation. Just abort.
  if (method_verifier->HasFailures()) {
    return NULL;
  }

  // Allocated lazily on the first devirtualizable call site found.
  UniquePtr<PcToConcreteMethodMap> pc_to_concrete_method_map;
  const DexFile::CodeItem* code_item = method_verifier->CodeItem();
  const uint16_t* insns = code_item->insns_;
  const Instruction* inst = Instruction::At(insns);
  const Instruction* end = Instruction::At(insns + code_item->insns_size_in_code_units_);

  for (; inst < end; inst = inst->Next()) {
    // Only virtual and interface invokes (plain or /range) are candidates.
    bool is_virtual = (inst->Opcode() == Instruction::INVOKE_VIRTUAL) ||
        (inst->Opcode() == Instruction::INVOKE_VIRTUAL_RANGE);
    bool is_interface = (inst->Opcode() == Instruction::INVOKE_INTERFACE) ||
        (inst->Opcode() == Instruction::INVOKE_INTERFACE_RANGE);

    if (!is_interface && !is_virtual) {
      continue;
    }
    // Get reg type for register holding the reference to the object that will be dispatched upon.
    uint32_t dex_pc = inst->GetDexPc(insns);
    verifier::RegisterLine* line = method_verifier->GetRegLine(dex_pc);
    bool is_range = (inst->Opcode() == Instruction::INVOKE_VIRTUAL_RANGE) ||
        (inst->Opcode() == Instruction::INVOKE_INTERFACE_RANGE);
    const verifier::RegType&
        reg_type(line->GetRegisterType(is_range ? inst->VRegC_3rc() : inst->VRegC_35c()));

    if (!reg_type.HasClass()) {
      // We will compute devirtualization information only when we know the Class of the reg type.
      continue;
    }
    mirror::Class* reg_class = reg_type.GetClass();
    if (reg_class->IsInterface()) {
      // We can't devirtualize when the known type of the register is an interface.
      continue;
    }
    if (reg_class->IsAbstract() && !reg_class->IsArrayClass()) {
      // We can't devirtualize abstract classes except on arrays of abstract classes.
      continue;
    }
    mirror::ArtMethod* abstract_method = method_verifier->GetDexCache()->GetResolvedMethod(
        is_range ? inst->VRegB_3rc() : inst->VRegB_35c());
    if (abstract_method == NULL) {
      // If the method is not found in the cache this means that it was never found
      // by ResolveMethodAndCheckAccess() called when verifying invoke_*.
      continue;
    }
    // Find the concrete method.
    mirror::ArtMethod* concrete_method = NULL;
    if (is_interface) {
      concrete_method = reg_type.GetClass()->FindVirtualMethodForInterface(abstract_method);
    }
    if (is_virtual) {
      concrete_method = reg_type.GetClass()->FindVirtualMethodForVirtual(abstract_method);
    }
    if (concrete_method == NULL || concrete_method->IsAbstract()) {
      // In cases where concrete_method is not found, or is abstract, continue to the next invoke.
      continue;
    }
    if (reg_type.IsPreciseReference() || concrete_method->IsFinal() ||
        concrete_method->GetDeclaringClass()->IsFinal()) {
      // If we knew exactly the class being dispatched upon, or if the target method cannot be
      // overridden record the target to be used in the compiler driver.
      if (pc_to_concrete_method_map.get() == NULL) {
        pc_to_concrete_method_map.reset(new PcToConcreteMethodMap());
      }
      MethodReference concrete_ref(
          concrete_method->GetDeclaringClass()->GetDexCache()->GetDexFile(),
          concrete_method->GetDexMethodIndex());
      pc_to_concrete_method_map->Put(dex_pc, concrete_ref);
    }
  }
  return pc_to_concrete_method_map.release();
}
| 440 | |
| 441 | void VerifiedMethodsData::SetDevirtMap(MethodReference ref, |
| 442 | const PcToConcreteMethodMap* devirt_map) { |
| 443 | WriterMutexLock mu(Thread::Current(), devirt_maps_lock_); |
| 444 | DevirtualizationMapTable::iterator it = devirt_maps_.find(ref); |
| 445 | if (it != devirt_maps_.end()) { |
| 446 | delete it->second; |
| 447 | devirt_maps_.erase(it); |
| 448 | } |
| 449 | |
| 450 | devirt_maps_.Put(ref, devirt_map); |
| 451 | DCHECK(devirt_maps_.find(ref) != devirt_maps_.end()); |
| 452 | } |
| 453 | |
| 454 | } // namespace art |