/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "class_linker.h"

#include <unistd.h>

#include <algorithm>
#include <deque>
#include <forward_list>
#include <iostream>
#include <map>
#include <memory>
#include <queue>
#include <string>
#include <string_view>
#include <tuple>
#include <unordered_map>
#include <utility>
#include <vector>

#include "android-base/stringprintf.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "barrier.h"
#include "base/arena_allocator.h"
#include "base/casts.h"
#include "base/file_utils.h"
#include "base/leb128.h"
#include "base/logging.h"
#include "base/mutex-inl.h"
#include "base/os.h"
#include "base/quasi_atomic.h"
#include "base/scoped_arena_containers.h"
#include "base/scoped_flock.h"
#include "base/stl_util.h"
#include "base/string_view_cpp20.h"
#include "base/systrace.h"
#include "base/time_utils.h"
#include "base/unix_file/fd_file.h"
#include "base/utils.h"
#include "base/value_object.h"
#include "cha.h"
#include "class_linker-inl.h"
#include "class_loader_utils.h"
#include "class_root-inl.h"
#include "class_table-inl.h"
#include "compiler_callbacks.h"
#include "debug_print.h"
#include "debugger.h"
#include "dex/class_accessor-inl.h"
#include "dex/descriptors_names.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_exception_helpers.h"
#include "dex/dex_file_loader.h"
#include "dex/signature-inl.h"
#include "dex/utf.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "experimental_flags.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap-visit-objects-inl.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "gc/space/image_space.h"
#include "gc/space/space-inl.h"
#include "gc_root-inl.h"
#include "handle_scope-inl.h"
#include "hidden_api.h"
#include "image-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "intern_table-inl.h"
#include "interpreter/interpreter.h"
#include "interpreter/mterp/nterp.h"
#include "jit/debugger_interface.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jni/java_vm_ext.h"
#include "jni/jni_internal.h"
#include "linear_alloc.h"
#include "mirror/array-alloc-inl.h"
#include "mirror/array-inl.h"
#include "mirror/call_site.h"
#include "mirror/class-alloc-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class.h"
#include "mirror/class_ext.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/emulated_stack_frame.h"
#include "mirror/field.h"
#include "mirror/iftable-inl.h"
#include "mirror/method.h"
#include "mirror/method_handle_impl.h"
#include "mirror/method_handles_lookup.h"
#include "mirror/method_type.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object.h"
#include "mirror/object_array-alloc-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object_array.h"
#include "mirror/object_reference.h"
#include "mirror/object_reference-inl.h"
#include "mirror/proxy.h"
#include "mirror/reference-inl.h"
#include "mirror/stack_trace_element.h"
#include "mirror/string-inl.h"
#include "mirror/throwable.h"
#include "mirror/var_handle.h"
#include "native/dalvik_system_DexFile.h"
#include "nativehelper/scoped_local_ref.h"
#include "oat.h"
#include "oat_file-inl.h"
#include "oat_file.h"
#include "oat_file_assistant.h"
#include "oat_file_manager.h"
#include "object_lock.h"
#include "profile/profile_compilation_info.h"
#include "runtime.h"
#include "runtime_callbacks.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread.h"
#include "thread_list.h"
#include "trace.h"
#include "transaction.h"
#include "utils/dex_cache_arrays_layout-inl.h"
#include "verifier/class_verifier.h"
#include "well_known_classes.h"

#include "interpreter/interpreter_mterp_impl.h"

namespace art {

using android::base::StringPrintf;

static constexpr bool kSanityCheckObjects = kIsDebugBuild;
static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;

static void ThrowNoClassDefFoundError(const char* fmt, ...)
    __attribute__((__format__(__printf__, 1, 2)))
    REQUIRES_SHARED(Locks::mutator_lock_);
static void ThrowNoClassDefFoundError(const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  Thread* self = Thread::Current();
  self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
  va_end(args);
}

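// Returns true if the exception class described by `descriptor` can be resolved and declares a
// <init>(String) constructor, so a stored verify error can be rethrown with a detail message.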
static bool HasInitWithString(Thread* self, ClassLinker* class_linker, const char* descriptor)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* method = self->GetCurrentMethod(nullptr);
  StackHandleScope<1> hs(self);
  Handle<mirror::ClassLoader> class_loader(hs.NewHandle(method != nullptr ?
      method->GetDeclaringClass()->GetClassLoader() : nullptr));
  ObjPtr<mirror::Class> exception_class = class_linker->FindClass(self, descriptor, class_loader);

  if (exception_class == nullptr) {
    // No exc class ~ no <init>-with-string.
    CHECK(self->IsExceptionPending());
    self->ClearException();
    return false;
  }

  ArtMethod* exception_init_method = exception_class->FindConstructor(
      "(Ljava/lang/String;)V", class_linker->GetImagePointerSize());
  return exception_init_method != nullptr;
}

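// Returns the verify error stored in the class's ClassExt data, or null if no ClassExt has been
// allocated for the class.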
static ObjPtr<mirror::Object> GetVerifyError(ObjPtr<mirror::Class> c)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::ClassExt> ext(c->GetExtData());
  if (ext == nullptr) {
    return nullptr;
  } else {
    return ext->GetVerifyError();
  }
}

// Helper for ThrowEarlierClassFailure. Throws the stored error.
static void HandleEarlierVerifyError(Thread* self,
                                     ClassLinker* class_linker,
                                     ObjPtr<mirror::Class> c)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Object> obj = GetVerifyError(c);
  DCHECK(obj != nullptr);
  self->AssertNoPendingException();
  if (obj->IsClass()) {
    // Previous error has been stored as a class. Create a new exception of that type.

    // It's possible the exception doesn't have an <init>(String).
    std::string temp;
    const char* descriptor = obj->AsClass()->GetDescriptor(&temp);

    if (HasInitWithString(self, class_linker, descriptor)) {
      self->ThrowNewException(descriptor, c->PrettyDescriptor().c_str());
    } else {
      self->ThrowNewException(descriptor, nullptr);
    }
  } else {
    // Previous error has been stored as an instance. Just rethrow.
    ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
    ObjPtr<mirror::Class> error_class = obj->GetClass();
    CHECK(throwable_class->IsAssignableFrom(error_class));
    self->SetException(obj->AsThrowable());
  }
  self->AssertPendingException();
}

// Ensures that methods have the kAccSkipAccessChecks bit set. We use the
// kAccVerificationAttempted bit on the class access flags to determine whether this has been done
// before.
static void EnsureSkipAccessChecksMethods(Handle<mirror::Class> klass, PointerSize pointer_size)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  if (!klass->WasVerificationAttempted()) {
    klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
    klass->SetVerificationAttempted();
    // Now that the class has passed verification, try to set nterp entrypoints
    // to methods that currently use the switch interpreter.
    if (interpreter::CanRuntimeUseNterp()) {
      for (ArtMethod& m : klass->GetMethods(pointer_size)) {
        if (class_linker->IsQuickToInterpreterBridge(m.GetEntryPointFromQuickCompiledCode()) &&
            interpreter::CanMethodUseNterp(&m)) {
          if (klass->IsVisiblyInitialized() || !NeedsClinitCheckBeforeCall(&m)) {
            runtime->GetInstrumentation()->UpdateMethodsCode(&m, interpreter::GetNterpEntryPoint());
          } else {
            // Put the resolution stub, which will initialize the class and then
            // call the method with nterp.
            runtime->GetInstrumentation()->UpdateMethodsCode(&m, GetQuickResolutionStub());
          }
        }
      }
    }
  }
}

// Callback responsible for making a batch of classes visibly initialized
// after all threads have called it from a checkpoint, ensuring visibility.
class ClassLinker::VisiblyInitializedCallback final
    : public Closure, public IntrusiveForwardListNode<VisiblyInitializedCallback> {
 public:
  explicit VisiblyInitializedCallback(ClassLinker* class_linker)
      : class_linker_(class_linker),
        num_classes_(0u),
        thread_visibility_counter_(0),
        barriers_() {
    std::fill_n(classes_, kMaxClasses, nullptr);
  }

  bool IsEmpty() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == 0u;
  }

  bool IsFull() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == kMaxClasses;
  }

  void AddClass(Thread* self, ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kInitialized);
    DCHECK(!IsFull());
    classes_[num_classes_] = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, klass);
    ++num_classes_;
  }

  void AddBarrier(Barrier* barrier) {
    barriers_.push_front(barrier);
  }

  std::forward_list<Barrier*> GetAndClearBarriers() {
    std::forward_list<Barrier*> result;
    result.swap(barriers_);
    result.reverse();  // Return barriers in insertion order.
    return result;
  }

  void MakeVisible(Thread* self) {
    DCHECK_EQ(thread_visibility_counter_.load(std::memory_order_relaxed), 0);
    size_t count = Runtime::Current()->GetThreadList()->RunCheckpoint(this);
    AdjustThreadVisibilityCounter(self, count);
  }

  void Run(Thread* self) override {
    self->ClearMakeVisiblyInitializedCounter();
    AdjustThreadVisibilityCounter(self, -1);
  }

 private:
  void AdjustThreadVisibilityCounter(Thread* self, ssize_t adjustment) {
    ssize_t old = thread_visibility_counter_.fetch_add(adjustment, std::memory_order_relaxed);
    if (old + adjustment == 0) {
      // All threads passed the checkpoint. Mark classes as visibly initialized.
      {
        ScopedObjectAccess soa(self);
        StackHandleScope<1u> hs(self);
        MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
        JavaVMExt* vm = self->GetJniEnv()->GetVm();
        for (size_t i = 0, num = num_classes_; i != num; ++i) {
          klass.Assign(ObjPtr<mirror::Class>::DownCast(self->DecodeJObject(classes_[i])));
          vm->DeleteWeakGlobalRef(self, classes_[i]);
          if (klass != nullptr) {
            mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
            class_linker_->FixupStaticTrampolines(self, klass.Get());
          }
        }
        num_classes_ = 0u;
      }
      class_linker_->VisiblyInitializedCallbackDone(self, this);
    }
  }

  static constexpr size_t kMaxClasses = 16;

  ClassLinker* const class_linker_;
  size_t num_classes_;
  jweak classes_[kMaxClasses];

  // The thread visibility counter starts at 0 and it is incremented by the number of
  // threads that need to run this callback (by the thread that requests the callback
  // to be run) and decremented once for each `Run()` execution. When it reaches 0,
  // whether after the increment or after a decrement, we know that `Run()` was executed
  // for all threads and therefore we can mark the classes as visibly initialized.
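  // For example, if the checkpoint is run on three threads, the adjustments are +3 (made by the
  // requesting thread) and three times -1 (one per `Run()`), in any order; whichever adjustment
  // brings the sum to 0 marks the batch of classes as visibly initialized.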
  std::atomic<ssize_t> thread_visibility_counter_;

  // List of barriers to `Pass()` for threads that wait for the callback to complete.
  std::forward_list<Barrier*> barriers_;
};

void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wait) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    return;  // Nothing to do. Thanks to the x86 memory model, classes skip the initialized status.
  }
  std::optional<Barrier> maybe_barrier;  // Avoid constructing the Barrier for `wait == false`.
  if (wait) {
    maybe_barrier.emplace(0);
  }
  int wait_count = 0;
  VisiblyInitializedCallback* callback = nullptr;
  {
    MutexLock lock(self, visibly_initialized_callback_lock_);
    if (visibly_initialized_callback_ != nullptr && !visibly_initialized_callback_->IsEmpty()) {
      callback = visibly_initialized_callback_.release();
      running_visibly_initialized_callbacks_.push_front(*callback);
    }
    if (wait) {
      DCHECK(maybe_barrier.has_value());
      Barrier* barrier = std::addressof(*maybe_barrier);
      for (VisiblyInitializedCallback& cb : running_visibly_initialized_callbacks_) {
        cb.AddBarrier(barrier);
        ++wait_count;
      }
    }
  }
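  // Run the checkpoint outside the lock; once every thread has executed the callback, the callback
  // passes any registered barriers and removes itself via VisiblyInitializedCallbackDone().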
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  if (wait_count != 0) {
    DCHECK(maybe_barrier.has_value());
    maybe_barrier->Increment(self, wait_count);
  }
}

void ClassLinker::VisiblyInitializedCallbackDone(Thread* self,
                                                 VisiblyInitializedCallback* callback) {
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Pass the barriers if requested.
  for (Barrier* barrier : callback->GetAndClearBarriers()) {
    barrier->Pass(self);
  }
  // Remove the callback from the list of running callbacks.
  auto before = running_visibly_initialized_callbacks_.before_begin();
  auto it = running_visibly_initialized_callbacks_.begin();
  DCHECK(it != running_visibly_initialized_callbacks_.end());
  while (std::addressof(*it) != callback) {
    before = it;
    ++it;
    DCHECK(it != running_visibly_initialized_callbacks_.end());
  }
  running_visibly_initialized_callbacks_.erase_after(before);
  // Reuse or destroy the callback object.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(callback);
  } else {
    delete callback;
  }
}

void ClassLinker::ForceClassInitialized(Thread* self, Handle<mirror::Class> klass) {
  ClassLinker::VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
  if (cb != nullptr) {
    cb->MakeVisible(self);
  }
  ScopedThreadSuspension sts(self, ThreadState::kSuspended);
  MakeInitializedClassesVisiblyInitialized(self, /*wait=*/true);
}

ClassLinker::VisiblyInitializedCallback* ClassLinker::MarkClassInitialized(
    Thread* self, Handle<mirror::Class> klass) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // Thanks to the x86 memory model, we do not need any memory fences and
    // we can immediately mark the class as visibly initialized.
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  if (Runtime::Current()->IsActiveTransaction()) {
    // Transactions are single-threaded, so we can mark the class as visibly initialized.
    // (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
  MutexLock lock(self, visibly_initialized_callback_lock_);
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(new VisiblyInitializedCallback(this));
  }
  DCHECK(!visibly_initialized_callback_->IsFull());
  visibly_initialized_callback_->AddClass(self, klass.Get());

  if (visibly_initialized_callback_->IsFull()) {
    VisiblyInitializedCallback* callback = visibly_initialized_callback_.release();
    running_visibly_initialized_callbacks_.push_front(*callback);
    return callback;
  } else {
    return nullptr;
  }
}

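// Registers `native_method` as the JNI implementation of `method`. For a @CriticalNative method
// whose declaring class is not yet visibly initialized, the entrypoint update is deferred and
// recorded in critical_native_code_with_clinit_check_ instead.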
const void* ClassLinker::RegisterNative(
    Thread* self, ArtMethod* method, const void* native_method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  CHECK(native_method != nullptr) << method->PrettyMethod();
  void* new_native_method = nullptr;
  Runtime* runtime = Runtime::Current();
  runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
                                                       native_method,
                                                       /*out*/&new_native_method);
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Remove old registered method if any.
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    // To ensure correct memory visibility, we need the class to be visibly
    // initialized before we can set the JNI entrypoint.
    if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
      method->SetEntryPointFromJni(new_native_method);
    } else {
      critical_native_code_with_clinit_check_.emplace(method, new_native_method);
    }
  } else {
    method->SetEntryPointFromJni(new_native_method);
  }
  return new_native_method;
}

void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  // Restore stub to lookup native pointer via dlsym.
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
  } else {
    method->SetEntryPointFromJni(GetJniDlsymLookupStub());
  }
}

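// Returns the native code currently registered for `method`, or nullptr if only a JNI dlsym
// lookup stub is installed.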
const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      return it->second;
    }
    const void* native_code = method->GetEntryPointFromJni();
    return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
  } else {
    const void* native_code = method->GetEntryPointFromJni();
    return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
  }
}

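// Reports a class that previously failed loading or initialization by throwing either the stored
// verify error or a NoClassDefFoundError (a pre-allocated one when running the AOT compiler).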
void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
                                           bool wrap_in_no_class_def,
                                           bool log) {
  // The class failed to initialize on a previous attempt, so we want to throw
  // a NoClassDefFoundError (v2 2.17.5). The exception to this rule is if we
  // failed in verification, in which case v2 5.4.1 says we need to re-throw
  // the previous error.
  Runtime* const runtime = Runtime::Current();
  if (!runtime->IsAotCompiler()) {  // Give info if this occurs at runtime.
    std::string extra;
    ObjPtr<mirror::Object> verify_error = GetVerifyError(c);
    if (verify_error != nullptr) {
      if (verify_error->IsClass()) {
        extra = mirror::Class::PrettyDescriptor(verify_error->AsClass());
      } else {
        extra = verify_error->AsThrowable()->Dump();
      }
    }
    if (log) {
      LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
                << ": " << extra;
    }
  }

  CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
  Thread* self = Thread::Current();
  if (runtime->IsAotCompiler()) {
    // At compile time, accurate errors and NCDFE are disabled to speed compilation.
    ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
  } else {
    ObjPtr<mirror::Object> verify_error = GetVerifyError(c);
    if (verify_error != nullptr) {
      // Rethrow stored error.
      HandleEarlierVerifyError(self, this, c);
    }
    // TODO This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
    // might have meant to go down the earlier if statement with the original error but it got
    // swallowed by the OOM so we end up here.
    if (verify_error == nullptr || wrap_in_no_class_def) {
      // If there isn't a recorded earlier error, or this is a repeat throw from initialization,
      // the top-level exception must be a NoClassDefFoundError. The potentially already pending
      // exception will be a cause.
      self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
                                     c->PrettyDescriptor().c_str());
    }
  }
}

static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (VLOG_IS_ON(class_linker)) {
    std::string temp;
    LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
              << klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
  }
}

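// Wraps the exception pending after a failed <clinit> in an ExceptionInInitializerError, unless
// it is already an instance of java.lang.Error, which is rethrown as-is.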
static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread* self = Thread::Current();
  JNIEnv* env = self->GetJniEnv();

  ScopedLocalRef<jthrowable> cause(env, env->ExceptionOccurred());
  CHECK(cause.get() != nullptr);

  // Boot classpath classes should not fail initialization. This is a sanity debug check. This
  // cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
  if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
    std::string tmp;
    // We want to LOG(FATAL) on debug builds since this really shouldn't be happening, but only
    // when no AsyncExceptions are being thrown around, since those could have caused the error.
    bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
    LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
                                            << " failed initialization: "
                                            << self->GetException()->Dump();
  }

  env->ExceptionClear();
  bool is_error = env->IsInstanceOf(cause.get(), WellKnownClasses::java_lang_Error);
  env->Throw(cause.get());

  // We only wrap non-Error exceptions; an Error can just be used as-is.
  if (!is_error) {
    self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
  }
  VlogClassInitializationFailure(klass);
}

// Gap between two fields in object layout.
struct FieldGap {
  uint32_t start_offset;  // The offset from the start of the object.
  uint32_t size;  // The gap size of 1, 2, or 4 bytes.
};
struct FieldGapsComparator {
  FieldGapsComparator() {
  }
  bool operator() (const FieldGap& lhs, const FieldGap& rhs)
      NO_THREAD_SAFETY_ANALYSIS {
    // Sort by gap size, largest first. Secondary sort by starting offset.
    // Note that the priority queue returns the largest element, so operator()
    // should return true if lhs is less than rhs.
    return lhs.size < rhs.size || (lhs.size == rhs.size && lhs.start_offset > rhs.start_offset);
  }
};
using FieldGaps = std::priority_queue<FieldGap, std::vector<FieldGap>, FieldGapsComparator>;
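// The queue therefore yields the largest gap first and, among gaps of equal size, the one with
// the lowest start offset: with gaps {8, 2} and {12, 4} queued, top() returns {12, 4}.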

// Adds largest aligned gaps to queue of gaps.
static void AddFieldGap(uint32_t gap_start, uint32_t gap_end, FieldGaps* gaps) {
  DCHECK(gaps != nullptr);

  uint32_t current_offset = gap_start;
  while (current_offset != gap_end) {
    size_t remaining = gap_end - current_offset;
    if (remaining >= sizeof(uint32_t) && IsAligned<4>(current_offset)) {
      gaps->push(FieldGap {current_offset, sizeof(uint32_t)});
      current_offset += sizeof(uint32_t);
    } else if (remaining >= sizeof(uint16_t) && IsAligned<2>(current_offset)) {
      gaps->push(FieldGap {current_offset, sizeof(uint16_t)});
      current_offset += sizeof(uint16_t);
    } else {
      gaps->push(FieldGap {current_offset, sizeof(uint8_t)});
      current_offset += sizeof(uint8_t);
    }
    DCHECK_LE(current_offset, gap_end) << "Overran gap";
  }
}
// Shuffle fields forward, making use of gaps whenever possible.
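// Each field of size n is placed either into a queued gap of at least n bytes or at the current
// n-aligned field_offset, which is then advanced; padding created by aligning the offset is
// recorded as a new gap.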
template<int n>
static void ShuffleForward(size_t* current_field_idx,
                           MemberOffset* field_offset,
                           std::deque<ArtField*>* grouped_and_sorted_fields,
                           FieldGaps* gaps)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(current_field_idx != nullptr);
  DCHECK(grouped_and_sorted_fields != nullptr);
  DCHECK(gaps != nullptr);
  DCHECK(field_offset != nullptr);

  DCHECK(IsPowerOfTwo(n));
  while (!grouped_and_sorted_fields->empty()) {
    ArtField* field = grouped_and_sorted_fields->front();
    Primitive::Type type = field->GetTypeAsPrimitiveType();
    if (Primitive::ComponentSize(type) < n) {
      break;
    }
    if (!IsAligned<n>(field_offset->Uint32Value())) {
      MemberOffset old_offset = *field_offset;
      *field_offset = MemberOffset(RoundUp(field_offset->Uint32Value(), n));
      AddFieldGap(old_offset.Uint32Value(), field_offset->Uint32Value(), gaps);
    }
    CHECK(type != Primitive::kPrimNot) << field->PrettyField();  // should be primitive types
    grouped_and_sorted_fields->pop_front();
    if (!gaps->empty() && gaps->top().size >= n) {
      FieldGap gap = gaps->top();
      gaps->pop();
      DCHECK_ALIGNED(gap.start_offset, n);
      field->SetOffset(MemberOffset(gap.start_offset));
      if (gap.size > n) {
        AddFieldGap(gap.start_offset + n, gap.start_offset + gap.size, gaps);
      }
    } else {
      DCHECK_ALIGNED(field_offset->Uint32Value(), n);
      field->SetOffset(*field_offset);
      *field_offset = MemberOffset(field_offset->Uint32Value() + n);
    }
    ++(*current_field_idx);
  }
}

ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
    : boot_class_table_(new ClassTable()),
      failed_dex_cache_class_lookups_(0),
      class_roots_(nullptr),
      find_array_class_cache_next_victim_(0),
      init_done_(false),
      log_new_roots_(false),
      intern_table_(intern_table),
      fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
      jni_dlsym_lookup_trampoline_(nullptr),
      jni_dlsym_lookup_critical_trampoline_(nullptr),
      quick_resolution_trampoline_(nullptr),
      quick_imt_conflict_trampoline_(nullptr),
      quick_generic_jni_trampoline_(nullptr),
      quick_to_interpreter_bridge_trampoline_(nullptr),
      image_pointer_size_(kRuntimePointerSize),
      visibly_initialized_callback_lock_("visibly initialized callback lock"),
      visibly_initialized_callback_(nullptr),
      critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
      critical_native_code_with_clinit_check_(),
      cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
  // CHA is disabled during AOT compilation; see b/34193647.

  CHECK(intern_table_ != nullptr);
  static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
                "Array cache size wrong.");
  std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
}

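// CHECKs that the pre-allocated boot class `c1` matches what FindSystemClass() loads for
// `descriptor`; a mismatch indicates that the libcore and art projects are out of sync.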
void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
  ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
  if (c2 == nullptr) {
    LOG(FATAL) << "Could not find class " << descriptor;
    UNREACHABLE();
  }
  if (c1.Get() != c2) {
    std::ostringstream os1, os2;
    c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
    c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
    LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
               << ". This is most likely the result of a broken build. Make sure that "
               << "libcore and art projects match.\n\n"
               << os1.str() << "\n\n" << os2.str();
    UNREACHABLE();
  }
}

bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
                                   std::string* error_msg) {
  VLOG(startup) << "ClassLinker::Init";

  Thread* const self = Thread::Current();
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();

  CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
  CHECK(!init_done_);

  // Use the pointer size from the runtime since we are probably creating the image.
  image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());

  // java_lang_Class comes first, it's needed for AllocClass
  // The GC can't handle an object with a null class since we can't get the size of this object.
  heap->IncrementDisableMovingGC(self);
  StackHandleScope<64> hs(self);  // 64 is picked arbitrarily.
  auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
  // Allocate the object as non-movable so that there are no cases where Object::IsClass returns
  // the incorrect result when comparing to-space vs from-space.
  Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
      heap->AllocNonMovableObject(self, nullptr, class_class_size, VoidFunctor()))));
  CHECK(java_lang_Class != nullptr);
  java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
  java_lang_Class->SetClass(java_lang_Class.Get());
  if (kUseBakerReadBarrier) {
    java_lang_Class->AssertReadBarrierState();
  }
  java_lang_Class->SetClassSize(class_class_size);
  java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
  heap->DecrementDisableMovingGC(self);
  // AllocClass(ObjPtr<mirror::Class>) can now be used

  // Class[] is used for reflection support.
  auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
  Handle<mirror::Class> class_array_class(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
  class_array_class->SetComponentType(java_lang_Class.Get());

  // java_lang_Object comes next so that object_array_class can be created.
  Handle<mirror::Class> java_lang_Object(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
  CHECK(java_lang_Object != nullptr);
  // backfill Object as the super class of Class.
  java_lang_Class->SetSuperClass(java_lang_Object.Get());
  mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);

  java_lang_Object->SetObjectSize(sizeof(mirror::Object));
  // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
  // cleared without triggering the read barrier and unintentionally mark the sentinel alive.
  runtime->SetSentinel(heap->AllocNonMovableObject(self,
                                                   java_lang_Object.Get(),
                                                   java_lang_Object->GetObjectSize(),
                                                   VoidFunctor()));

  // Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
  if (kBitstringSubtypeCheckEnabled) {
    // It might seem the lock here is unnecessary, however all the SubtypeCheck
    // functions are annotated to require locks all the way down.
    //
    // We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
  }

  // Object[] next to hold class roots.
  Handle<mirror::Class> object_array_class(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(),
                 mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
  object_array_class->SetComponentType(java_lang_Object.Get());

  // Setup java.lang.String.
  //
  // We make this class non-movable for the unlikely case where it were to be
  // moved by a sticky-bit (minor) collection when using the Generational
  // Concurrent Copying (CC) collector, potentially creating a stale reference
  // in the `klass_` field of one of its instances allocated in the Large-Object
  // Space (LOS) -- see the comment about the dirty card scanning logic in
  // art::gc::collector::ConcurrentCopying::MarkingPhase.
  Handle<mirror::Class> java_lang_String(hs.NewHandle(
      AllocClass</* kMovable= */ false>(
          self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
  java_lang_String->SetStringClass();
  mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);

  // Setup java.lang.ref.Reference.
  Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
  java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
  mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);

  // Create storage for root classes, save away our work so far (requires descriptors).
  class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
      mirror::ObjectArray<mirror::Class>::Alloc(self,
                                                object_array_class.Get(),
                                                static_cast<int32_t>(ClassRoot::kMax)));
  CHECK(!class_roots_.IsNull());
  SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
  SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
  SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
  SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
  SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
  SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());

  // Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
  java_lang_Object->SetIfTable(AllocIfTable(self, 0));

  // Create array interface entries to populate once we can load system classes.
  object_array_class->SetIfTable(AllocIfTable(self, 2));
  DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());

  // Setup the primitive type classes.
  CreatePrimitiveClass(self, Primitive::kPrimBoolean, ClassRoot::kPrimitiveBoolean);
  CreatePrimitiveClass(self, Primitive::kPrimByte, ClassRoot::kPrimitiveByte);
  CreatePrimitiveClass(self, Primitive::kPrimChar, ClassRoot::kPrimitiveChar);
  CreatePrimitiveClass(self, Primitive::kPrimShort, ClassRoot::kPrimitiveShort);
  CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
  CreatePrimitiveClass(self, Primitive::kPrimLong, ClassRoot::kPrimitiveLong);
  CreatePrimitiveClass(self, Primitive::kPrimFloat, ClassRoot::kPrimitiveFloat);
  CreatePrimitiveClass(self, Primitive::kPrimDouble, ClassRoot::kPrimitiveDouble);
  CreatePrimitiveClass(self, Primitive::kPrimVoid, ClassRoot::kPrimitiveVoid);

  // Allocate the primitive array classes. We need only the native pointer
  // array at this point (int[] or long[], depending on architecture) but
  // we shall perform the same setup steps for all primitive array classes.
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveBoolean, ClassRoot::kBooleanArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveByte, ClassRoot::kByteArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveChar, ClassRoot::kCharArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveShort, ClassRoot::kShortArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveInt, ClassRoot::kIntArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveLong, ClassRoot::kLongArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveFloat, ClassRoot::kFloatArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveDouble, ClassRoot::kDoubleArrayClass);

  // now that these are registered, we can use AllocClass() and AllocObjectArray

  // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
  Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
  java_lang_DexCache->SetDexCacheClass();
  java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
  mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);


  // Setup dalvik.system.ClassExt
  Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
  mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);

  // Set up array classes for string, field, method
  Handle<mirror::Class> object_array_string(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(),
                 mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
  object_array_string->SetComponentType(java_lang_String.Get());
  SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());

  LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
  // Create runtime resolution and imt conflict methods.
  runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
  runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
  runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));

  // Setup boot_class_path_ and register class_path now that we can use AllocObjectArray to create
  // DexCache instances. Needs to be after String, Field, Method arrays since AllocDexCache uses
  // these roots.
  if (boot_class_path.empty()) {
    *error_msg = "Boot classpath is empty.";
    return false;
  }
  for (auto& dex_file : boot_class_path) {
    if (dex_file == nullptr) {
      *error_msg = "Null dex file.";
      return false;
    }
    AppendToBootClassPath(self, dex_file.get());
    boot_dex_files_.push_back(std::move(dex_file));
  }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700911
912 // now we can use FindSystemClass
913
Dmitry Petrochenkof0972a42014-05-16 17:43:39 +0700914 // Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
915 // we do not need friend classes or a publicly exposed setter.
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700916 quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800917 if (!runtime->IsAotCompiler()) {
Alex Light64ad14d2014-08-19 14:23:13 -0700918 // We need to set up the generic trampolines since we don't have an image.
Vladimir Marko7dac8642019-11-06 17:09:30 +0000919 jni_dlsym_lookup_trampoline_ = GetJniDlsymLookupStub();
Vladimir Markofa458ac2020-02-12 14:08:07 +0000920 jni_dlsym_lookup_critical_trampoline_ = GetJniDlsymLookupCriticalStub();
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700921 quick_resolution_trampoline_ = GetQuickResolutionStub();
922 quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
Vladimir Marko7dac8642019-11-06 17:09:30 +0000923 quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700924 quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
Alex Light64ad14d2014-08-19 14:23:13 -0700925 }
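  // Without a boot image the trampolines fall back to the stub implementations linked into the
  // runtime itself; when booting from an image they are read from the oat header instead (see
  // InitFromBootImage below).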
Dmitry Petrochenkof0972a42014-05-16 17:43:39 +0700926
Alex Lightd6251582016-10-31 11:12:30 -0700927 // Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init
Vladimir Marko2c64a832018-01-04 11:31:56 +0000928 mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800929 CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700930 CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000931 mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800932 CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
Vladimir Marko2c64a832018-01-04 11:31:56 +0000933 mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800934 CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700935 CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000936 mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
Alex Lightd6251582016-10-31 11:12:30 -0700937 CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
938 CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700939
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800940  // Run Class through FindSystemClass. This initializes the dex_cache_ fields and registers it
941  // in class_table_.
942 CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");
Elliott Hughes418d20f2011-09-22 14:00:39 -0700943
Vladimir Marko70e2a762019-07-12 16:49:00 +0100944  // Set up the core array classes, i.e. Object[], Class[], String[] and the primitive
945  // arrays; this can't be done until Object has a vtable and the component classes are loaded.
946 FinishCoreArrayClassSetup(ClassRoot::kObjectArrayClass);
947 FinishCoreArrayClassSetup(ClassRoot::kClassArrayClass);
948 FinishCoreArrayClassSetup(ClassRoot::kJavaLangStringArrayClass);
949 FinishCoreArrayClassSetup(ClassRoot::kBooleanArrayClass);
950 FinishCoreArrayClassSetup(ClassRoot::kByteArrayClass);
951 FinishCoreArrayClassSetup(ClassRoot::kCharArrayClass);
952 FinishCoreArrayClassSetup(ClassRoot::kShortArrayClass);
953 FinishCoreArrayClassSetup(ClassRoot::kIntArrayClass);
954 FinishCoreArrayClassSetup(ClassRoot::kLongArrayClass);
955 FinishCoreArrayClassSetup(ClassRoot::kFloatArrayClass);
956 FinishCoreArrayClassSetup(ClassRoot::kDoubleArrayClass);
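  // In rough terms, each call above completes an array class that was only pre-allocated
  // earlier: it fills in the parts that required Object's vtable and the loaded component type
  // (vtable, iftable, access flags) and registers the class in the class table.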
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700957
Ian Rogers23435d02012-09-24 11:23:12 -0700958  // Set up the single, global copy of "iftable".
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700959 auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800960 CHECK(java_lang_Cloneable != nullptr);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700961 auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800962 CHECK(java_io_Serializable != nullptr);
Ian Rogers23435d02012-09-24 11:23:12 -0700963 // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
964 // crawl up and explicitly list all of the supers as well.
Vladimir Marko02610552018-06-04 14:38:00 +0100965 object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
966 object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());
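  // All array classes share this one iftable, whose layout is simply:
  //   [0] java.lang.Cloneable
  //   [1] java.io.Serializable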
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700967
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700968 // Sanity check Class[] and Object[]'s interfaces. GetDirectInterface may cause thread
969 // suspension.
970 CHECK_EQ(java_lang_Cloneable.Get(),
Vladimir Marko19a4d372016-12-08 14:41:46 +0000971 mirror::Class::GetDirectInterface(self, class_array_class.Get(), 0));
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700972 CHECK_EQ(java_io_Serializable.Get(),
Vladimir Marko19a4d372016-12-08 14:41:46 +0000973 mirror::Class::GetDirectInterface(self, class_array_class.Get(), 1));
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700974 CHECK_EQ(java_lang_Cloneable.Get(),
Vladimir Marko19a4d372016-12-08 14:41:46 +0000975 mirror::Class::GetDirectInterface(self, object_array_class.Get(), 0));
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700976 CHECK_EQ(java_io_Serializable.Get(),
Vladimir Marko19a4d372016-12-08 14:41:46 +0000977 mirror::Class::GetDirectInterface(self, object_array_class.Get(), 1));
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700978
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700979 CHECK_EQ(object_array_string.Get(),
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100980 FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));
Brian Carlstrom1f870082011-08-23 16:02:11 -0700981
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800982  // End of special init trickery; all subsequent classes may be loaded via FindSystemClass.
Ian Rogers466bb252011-10-14 03:29:56 -0700983
Ian Rogers23435d02012-09-24 11:23:12 -0700984 // Create java.lang.reflect.Proxy root.
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100985 SetClassRoot(ClassRoot::kJavaLangReflectProxy,
986 FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));
Ian Rogers466bb252011-10-14 03:29:56 -0700987
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700988 // Create java.lang.reflect.Field.class root.
Vladimir Markoacb906d2018-05-30 10:23:49 +0100989 ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700990 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100991 SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700992
993 // Create java.lang.reflect.Field array root.
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700994 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
995 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100996 SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700997
998 // Create java.lang.reflect.Constructor.class root and array root.
999 class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
1000 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001001 SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -07001002 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
1003 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001004 SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -07001005
1006 // Create java.lang.reflect.Method.class root and array root.
1007 class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
1008 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001009 SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -07001010 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
1011 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001012 SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);
Mathieu Chartierdaaf3262015-03-24 13:30:28 -07001013
Orion Hodson005ac512017-10-24 15:43:43 +01001014 // Create java.lang.invoke.CallSite.class root
1015 class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
1016 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001017 SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +01001018
Narayan Kamathafa48272016-08-03 12:46:58 +01001019 // Create java.lang.invoke.MethodType.class root
1020 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
1021 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001022 SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);
Narayan Kamathafa48272016-08-03 12:46:58 +01001023
1024 // Create java.lang.invoke.MethodHandleImpl.class root
1025 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
1026 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001027 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
Vladimir Markoc7aa87e2018-05-24 15:19:52 +01001028 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());
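  // MethodHandle itself is not looked up separately; it is recorded as the superclass of the
  // MethodHandleImpl class resolved just above.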
Narayan Kamathafa48272016-08-03 12:46:58 +01001029
Orion Hodsonc069a302017-01-18 09:23:12 +00001030 // Create java.lang.invoke.MethodHandles.Lookup.class root
1031 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
1032 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001033 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);
Orion Hodsonc069a302017-01-18 09:23:12 +00001034
Orion Hodson005ac512017-10-24 15:43:43 +01001035 // Create java.lang.invoke.VarHandle.class root
1036 class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
Orion Hodsonc069a302017-01-18 09:23:12 +00001037 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001038 SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +01001039
1040 // Create java.lang.invoke.FieldVarHandle.class root
1041 class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
1042 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001043 SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +01001044
1045 // Create java.lang.invoke.ArrayElementVarHandle.class root
1046 class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
1047 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001048 SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +01001049
1050 // Create java.lang.invoke.ByteArrayViewVarHandle.class root
1051 class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
1052 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001053 SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +01001054
1055 // Create java.lang.invoke.ByteBufferViewVarHandle.class root
1056 class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
1057 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001058 SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);
Orion Hodsonc069a302017-01-18 09:23:12 +00001059
Narayan Kamath000e1882016-10-24 17:14:25 +01001060 class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
1061 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001062 SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);
Narayan Kamath000e1882016-10-24 17:14:25 +01001063
Brian Carlstrom1f870082011-08-23 16:02:11 -07001064  // The java.lang.ref classes need to be specially flagged, but otherwise are normal classes.
Fred Shih4ee7a662014-07-11 09:59:27 -07001065  // Finish initializing the Reference class.
Vladimir Marko2c64a832018-01-04 11:31:56 +00001066 mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -08001067 CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
Fred Shih4ee7a662014-07-11 09:59:27 -07001068 CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
Mathieu Chartiere401d142015-04-22 13:56:20 -07001069 CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
1070 mirror::Reference::ClassSize(image_pointer_size_));
Mathieu Chartierfc58af42015-04-16 18:00:39 -07001071 class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07001072 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07001073 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -07001074 class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07001075 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07001076 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -07001077 class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07001078 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07001079 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -07001080 class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07001081 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07001082 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);
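  // Tagging the concrete Reference subclasses with class flags lets the GC's reference processor
  // identify soft/weak/finalizer/phantom references with a cheap flag check rather than walking
  // the class hierarchy; at least, that is the intent of the flag bits set above.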
Brian Carlstrom1f870082011-08-23 16:02:11 -07001083
Ian Rogers23435d02012-09-24 11:23:12 -07001084  // Set up java.lang.ClassLoader, verifying the object_size_.
Mathieu Chartierfc58af42015-04-16 18:00:39 -07001085 class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
Mathieu Chartiere4275c02015-08-06 15:34:15 -07001086 class_root->SetClassLoaderClass();
Mathieu Chartierfc58af42015-04-16 18:00:39 -07001087 CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001088 SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07001089
jeffhao8cd6dda2012-02-22 10:15:34 -08001090 // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
Ian Rogers23435d02012-09-24 11:23:12 -07001091 // java.lang.StackTraceElement as a convenience.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001092 SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001093 SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
Brian Carlstromf3632832014-05-20 15:36:53 -07001094 FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001095 SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
1096 FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
1097 SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
Brian Carlstromf3632832014-05-20 15:36:53 -07001098 FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00001099 SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
1100 FindSystemClass(self, "[Ljava/lang/ClassLoader;"));
Elliott Hughesd8ddfd52011-08-15 14:32:53 -07001101
Mathieu Chartiercdca4762016-04-28 09:44:54 -07001102 // Create conflict tables that depend on the class linker.
1103 runtime->FixupConflictTables();
1104
Ian Rogers98379392014-02-24 16:53:16 -08001105 FinishInit(self);
Brian Carlstrom0a5b14d2011-09-27 13:29:15 -07001106
Brian Carlstroma004aa92012-02-08 18:05:09 -08001107 VLOG(startup) << "ClassLinker::InitFromCompiler exiting";
Andreas Gampe3db9c5d2015-11-17 11:52:46 -08001108
1109 return true;
Brian Carlstroma663ea52011-08-19 23:33:41 -07001110}
1111
Andreas Gampe9abc31e2018-05-17 11:47:09 -07001112static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
1113 REQUIRES_SHARED(Locks::mutator_lock_) {
1114 // Find String.<init> -> StringFactory bindings.
1115 ObjPtr<mirror::Class> string_factory_class =
1116 class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
1117 CHECK(string_factory_class != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001118 ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
Andreas Gampe9abc31e2018-05-17 11:47:09 -07001119 WellKnownClasses::InitStringInit(string_class, string_factory_class);
1120 // Update the primordial thread.
1121 self->InitStringEntryPoints();
1122}
1123
Ian Rogers98379392014-02-24 16:53:16 -08001124void ClassLinker::FinishInit(Thread* self) {
Elliott Hughes4dd9b4d2011-12-12 18:29:24 -08001125 VLOG(startup) << "ClassLinker::FinishInit entering";
Brian Carlstrom16192862011-09-12 17:50:06 -07001126
Andreas Gampe9abc31e2018-05-17 11:47:09 -07001127 CreateStringInitBindings(self, this);
1128
Brian Carlstrom16192862011-09-12 17:50:06 -07001129  // Let the heap know some key offsets into java.lang.ref instances.
Elliott Hughes20cde902011-10-04 17:37:27 -07001130  // Note: we hard-code the field indexes here rather than using FindInstanceField
Brian Carlstrom16192862011-09-12 17:50:06 -07001131  // because the types of the fields can't be resolved prior to the runtime being
1132  // fully initialized.
Andreas Gampe7b2450e2018-06-19 10:45:54 -07001133 StackHandleScope<3> hs(self);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001134 Handle<mirror::Class> java_lang_ref_Reference =
1135 hs.NewHandle(GetClassRoot<mirror::Reference>(this));
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001136 Handle<mirror::Class> java_lang_ref_FinalizerReference =
1137 hs.NewHandle(FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;"));
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08001138
Mathieu Chartierc7853442015-03-27 14:35:38 -07001139 ArtField* pendingNext = java_lang_ref_Reference->GetInstanceField(0);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001140 CHECK_STREQ(pendingNext->GetName(), "pendingNext");
1141 CHECK_STREQ(pendingNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
Brian Carlstrom16192862011-09-12 17:50:06 -07001142
Mathieu Chartierc7853442015-03-27 14:35:38 -07001143 ArtField* queue = java_lang_ref_Reference->GetInstanceField(1);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001144 CHECK_STREQ(queue->GetName(), "queue");
1145 CHECK_STREQ(queue->GetTypeDescriptor(), "Ljava/lang/ref/ReferenceQueue;");
Brian Carlstrom16192862011-09-12 17:50:06 -07001146
Mathieu Chartierc7853442015-03-27 14:35:38 -07001147 ArtField* queueNext = java_lang_ref_Reference->GetInstanceField(2);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001148 CHECK_STREQ(queueNext->GetName(), "queueNext");
1149 CHECK_STREQ(queueNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
Brian Carlstrom16192862011-09-12 17:50:06 -07001150
Mathieu Chartierc7853442015-03-27 14:35:38 -07001151 ArtField* referent = java_lang_ref_Reference->GetInstanceField(3);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001152 CHECK_STREQ(referent->GetName(), "referent");
1153 CHECK_STREQ(referent->GetTypeDescriptor(), "Ljava/lang/Object;");
Brian Carlstrom16192862011-09-12 17:50:06 -07001154
Mathieu Chartierc7853442015-03-27 14:35:38 -07001155 ArtField* zombie = java_lang_ref_FinalizerReference->GetInstanceField(2);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001156 CHECK_STREQ(zombie->GetName(), "zombie");
1157 CHECK_STREQ(zombie->GetTypeDescriptor(), "Ljava/lang/Object;");
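  // The CHECK_STREQs above pin down the hard-coded field indices: if the field layout of
  // Reference or FinalizerReference ever changes, startup fails loudly here instead of the heap
  // silently reading the wrong offsets.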
Brian Carlstrom16192862011-09-12 17:50:06 -07001158
Brian Carlstroma663ea52011-08-19 23:33:41 -07001159  // Ensure all class_roots_ are initialized.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001160 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
Brian Carlstroma663ea52011-08-19 23:33:41 -07001161 ClassRoot class_root = static_cast<ClassRoot>(i);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001162 ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07001163 CHECK(klass != nullptr);
1164 DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
Brian Carlstroma663ea52011-08-19 23:33:41 -07001165  // Note: SetClassRoot does additional validation;
1166  // if possible, add new checks there to catch errors early.
Brian Carlstrom75cb3b42011-07-28 02:13:36 -07001167 }
1168
Vladimir Marko02610552018-06-04 14:38:00 +01001169 CHECK(GetArrayIfTable() != nullptr);
Elliott Hughes92f14b22011-10-06 12:29:54 -07001170
Brian Carlstrom75cb3b42011-07-28 02:13:36 -07001171  // Disable the slow paths in FindClass and CreatePrimitiveClass now
1172  // that Object, Class, and Object[] are set up.
1173 init_done_ = true;
Brian Carlstrom0a5b14d2011-09-27 13:29:15 -07001174
Andreas Gampe7b2450e2018-06-19 10:45:54 -07001175 // Under sanitization, the small carve-out to handle stack overflow might not be enough to
1176 // initialize the StackOverflowError class (as it might require running the verifier). Instead,
1177 // ensure that the class will be initialized.
1178 if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
Andreas Gampee0bbab92019-07-25 12:28:22 -07001179 verifier::ClassVerifier::Init(this); // Need to prepare the verifier.
Andreas Gampe7b2450e2018-06-19 10:45:54 -07001180
1181 ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
1182 if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
1183 // Strange, but don't crash.
1184 LOG(WARNING) << "Could not prepare StackOverflowError.";
1185 self->ClearException();
1186 }
1187 }
1188
Elliott Hughes4dd9b4d2011-12-12 18:29:24 -08001189 VLOG(startup) << "ClassLinker::FinishInit exiting";
Brian Carlstrom75cb3b42011-07-28 02:13:36 -07001190}
1191
Vladimir Markodcfcce42018-06-27 10:00:28 +00001192void ClassLinker::RunRootClinits(Thread* self) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001193 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
1194 ObjPtr<mirror::Class> c = GetClassRoot(ClassRoot(i), this);
Elliott Hughes2a20cfd2011-09-23 19:30:41 -07001195 if (!c->IsArrayClass() && !c->IsPrimitive()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001196 StackHandleScope<1> hs(self);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001197 Handle<mirror::Class> h_class(hs.NewHandle(c));
David Srbecky08110ef2020-05-20 19:33:43 +01001198 if (!EnsureInitialized(self, h_class, true, true)) {
1199 LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
1200 << ": " << self->GetException()->Dump();
1201 }
Vladimir Markodcfcce42018-06-27 10:00:28 +00001202 } else {
1203 DCHECK(c->IsInitialized());
Elliott Hughes2a20cfd2011-09-23 19:30:41 -07001204 }
1205 }
1206}
1207
Jeff Haodcdc85b2015-12-04 14:06:18 -08001208struct TrampolineCheckData {
1209 const void* quick_resolution_trampoline;
1210 const void* quick_imt_conflict_trampoline;
1211 const void* quick_generic_jni_trampoline;
1212 const void* quick_to_interpreter_bridge_trampoline;
Andreas Gampe542451c2016-07-26 09:02:02 -07001213 PointerSize pointer_size;
Jeff Haodcdc85b2015-12-04 14:06:18 -08001214 ArtMethod* m;
1215 bool error;
1216};
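// Scratch data for the debug-build check in InitFromBootImage below: it records the trampoline
// addresses of one oat file so a heap walk can verify that no ArtMethod in another image still
// uses them as its entrypoint.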
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001217
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001218bool ClassLinker::InitFromBootImage(std::string* error_msg) {
1219 VLOG(startup) << __FUNCTION__ << " entering";
Brian Carlstroma663ea52011-08-19 23:33:41 -07001220 CHECK(!init_done_);
1221
Mathieu Chartierdaaf3262015-03-24 13:30:28 -07001222 Runtime* const runtime = Runtime::Current();
1223 Thread* const self = Thread::Current();
1224 gc::Heap* const heap = runtime->GetHeap();
Jeff Haodcdc85b2015-12-04 14:06:18 -08001225 std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
1226 CHECK(!spaces.empty());
Vladimir Marko024d69f2019-06-13 10:52:32 +01001227 const ImageHeader& image_header = spaces[0]->GetImageHeader();
1228 uint32_t pointer_size_unchecked = image_header.GetPointerSizeUnchecked();
Andreas Gampe542451c2016-07-26 09:02:02 -07001229 if (!ValidPointerSize(pointer_size_unchecked)) {
1230 *error_msg = StringPrintf("Invalid image pointer size: %u", pointer_size_unchecked);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001231 return false;
1232 }
Vladimir Marko3364d182019-03-13 13:55:01 +00001233 image_pointer_size_ = image_header.GetPointerSize();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001234 if (!runtime->IsAotCompiler()) {
1235 // Only the Aot compiler supports having an image with a different pointer size than the
1236 // runtime. This happens on the host for compiling 32 bit tests since we use a 64 bit libart
1237 // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps.
Andreas Gampe542451c2016-07-26 09:02:02 -07001238 if (image_pointer_size_ != kRuntimePointerSize) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001239 *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
Andreas Gampe542451c2016-07-26 09:02:02 -07001240 static_cast<size_t>(image_pointer_size_),
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001241 sizeof(void*));
1242 return false;
1243 }
1244 }
Vladimir Marko3364d182019-03-13 13:55:01 +00001245 DCHECK(!runtime->HasResolutionMethod());
1246 runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
1247 runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
1248 runtime->SetImtUnimplementedMethod(
1249 image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
1250 runtime->SetCalleeSaveMethod(
1251 image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
1252 CalleeSaveType::kSaveAllCalleeSaves);
1253 runtime->SetCalleeSaveMethod(
1254 image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
1255 CalleeSaveType::kSaveRefsOnly);
1256 runtime->SetCalleeSaveMethod(
1257 image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
1258 CalleeSaveType::kSaveRefsAndArgs);
1259 runtime->SetCalleeSaveMethod(
1260 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
1261 CalleeSaveType::kSaveEverything);
1262 runtime->SetCalleeSaveMethod(
1263 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
1264 CalleeSaveType::kSaveEverythingForClinit);
1265 runtime->SetCalleeSaveMethod(
1266 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
1267 CalleeSaveType::kSaveEverythingForSuspendCheck);
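  // In rough terms, the callee-save methods are runtime method placeholders that describe the
  // register-save layout of the various runtime transition frames (all callee-saves, refs only,
  // refs and args, everything); they are stored in the image and simply re-registered with the
  // runtime here.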
1268
Jeff Haodcdc85b2015-12-04 14:06:18 -08001269 std::vector<const OatFile*> oat_files =
1270 runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
1271 DCHECK(!oat_files.empty());
1272 const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
Vladimir Marko7dac8642019-11-06 17:09:30 +00001273 jni_dlsym_lookup_trampoline_ = default_oat_header.GetJniDlsymLookupTrampoline();
Vladimir Markofa458ac2020-02-12 14:08:07 +00001274 jni_dlsym_lookup_critical_trampoline_ = default_oat_header.GetJniDlsymLookupCriticalTrampoline();
Jeff Haodcdc85b2015-12-04 14:06:18 -08001275 quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
1276 quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
1277 quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
1278 quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
1279 if (kIsDebugBuild) {
1280 // Check that the other images use the same trampoline.
1281 for (size_t i = 1; i < oat_files.size(); ++i) {
1282 const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
Vladimir Marko7dac8642019-11-06 17:09:30 +00001283 const void* ith_jni_dlsym_lookup_trampoline_ =
1284 ith_oat_header.GetJniDlsymLookupTrampoline();
Vladimir Markofa458ac2020-02-12 14:08:07 +00001285 const void* ith_jni_dlsym_lookup_critical_trampoline_ =
1286 ith_oat_header.GetJniDlsymLookupCriticalTrampoline();
Jeff Haodcdc85b2015-12-04 14:06:18 -08001287 const void* ith_quick_resolution_trampoline =
1288 ith_oat_header.GetQuickResolutionTrampoline();
1289 const void* ith_quick_imt_conflict_trampoline =
1290 ith_oat_header.GetQuickImtConflictTrampoline();
1291 const void* ith_quick_generic_jni_trampoline =
1292 ith_oat_header.GetQuickGenericJniTrampoline();
1293 const void* ith_quick_to_interpreter_bridge_trampoline =
1294 ith_oat_header.GetQuickToInterpreterBridge();
Vladimir Marko7dac8642019-11-06 17:09:30 +00001295 if (ith_jni_dlsym_lookup_trampoline_ != jni_dlsym_lookup_trampoline_ ||
Vladimir Markofa458ac2020-02-12 14:08:07 +00001296 ith_jni_dlsym_lookup_critical_trampoline_ != jni_dlsym_lookup_critical_trampoline_ ||
Vladimir Marko7dac8642019-11-06 17:09:30 +00001297 ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
Jeff Haodcdc85b2015-12-04 14:06:18 -08001298 ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
1299 ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
1300 ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_) {
1301 // Make sure that all methods in this image do not contain those trampolines as
1302 // entrypoints. Otherwise the class-linker won't be able to work with a single set.
1303 TrampolineCheckData data;
1304 data.error = false;
1305 data.pointer_size = GetImagePointerSize();
1306 data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
1307 data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
1308 data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
1309 data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
1310 ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
Andreas Gampe0c183382017-07-13 22:26:24 -07001311 auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1312 if (obj->IsClass()) {
1313 ObjPtr<mirror::Class> klass = obj->AsClass();
1314 for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
1315 const void* entrypoint =
1316 m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
1317 if (entrypoint == data.quick_resolution_trampoline ||
1318 entrypoint == data.quick_imt_conflict_trampoline ||
1319 entrypoint == data.quick_generic_jni_trampoline ||
1320 entrypoint == data.quick_to_interpreter_bridge_trampoline) {
1321 data.m = &m;
1322 data.error = true;
1323 return;
1324 }
1325 }
1326 }
1327 };
1328 spaces[i]->GetLiveBitmap()->Walk(visitor);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001329 if (data.error) {
1330 ArtMethod* m = data.m;
David Sehr709b0702016-10-13 09:12:37 -07001331 LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001332 *error_msg = "Found an ArtMethod with a bad entrypoint";
1333 return false;
1334 }
1335 }
1336 }
1337 }
Brian Carlstrom58ae9412011-10-04 00:56:06 -07001338
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001339 class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
Vladimir Markod7e9bbf2019-03-28 13:18:57 +00001340 ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
Vladimir Marko024d69f2019-06-13 10:52:32 +01001341 image_header.GetImageRoot(ImageHeader::kClassRoots)));
Vladimir Markof75613c2018-06-05 12:51:04 +01001342 DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);
Mathieu Chartier02b6a782012-10-26 13:51:26 -07001343
Vladimir Marko024d69f2019-06-13 10:52:32 +01001344 DCHECK_EQ(GetClassRoot<mirror::Object>(this)->GetObjectSize(), sizeof(mirror::Object));
1345 ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
1346 ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
1347 image_header.GetImageRoot(ImageHeader::kBootImageLiveObjects));
1348 runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
1349 DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07001350
Vladimir Markod1908512018-11-22 14:57:28 +00001351 for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001352 // Boot class loader, use a null handle.
1353 std::vector<std::unique_ptr<const DexFile>> dex_files;
Vladimir Markod1908512018-11-22 14:57:28 +00001354 if (!AddImageSpace(spaces[i],
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001355 ScopedNullHandle<mirror::ClassLoader>(),
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001356 /*out*/&dex_files,
1357 error_msg)) {
1358 return false;
Jeff Haodcdc85b2015-12-04 14:06:18 -08001359 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001360 // Append opened dex files at the end.
1361 boot_dex_files_.insert(boot_dex_files_.end(),
1362 std::make_move_iterator(dex_files.begin()),
1363 std::make_move_iterator(dex_files.end()));
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001364 }
Mathieu Chartierbe8303d2017-08-17 17:39:39 -07001365 for (const std::unique_ptr<const DexFile>& dex_file : boot_dex_files_) {
1366 OatDexFile::MadviseDexFile(*dex_file, MadviseState::kMadviseStateAtLoad);
1367 }
Ian Rogers98379392014-02-24 16:53:16 -08001368 FinishInit(self);
Brian Carlstrom0a5b14d2011-09-27 13:29:15 -07001369
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001370 VLOG(startup) << __FUNCTION__ << " exiting";
1371 return true;
1372}
Andreas Gampe3db9c5d2015-11-17 11:52:46 -08001373
Vladimir Marko4433c432018-12-04 14:57:47 +00001374void ClassLinker::AddExtraBootDexFiles(
1375 Thread* self,
1376 std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
1377 for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -08001378 AppendToBootClassPath(self, dex_file.get());
Vladimir Marko4433c432018-12-04 14:57:47 +00001379 boot_dex_files_.push_back(std::move(dex_file));
1380 }
1381}
1382
Jeff Hao5872d7c2016-04-27 11:07:41 -07001383bool ClassLinker::IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001384 ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001385 return class_loader == nullptr ||
Mathieu Chartier0795f232016-09-27 18:43:30 -07001386 soa.Decode<mirror::Class>(WellKnownClasses::java_lang_BootClassLoader) ==
1387 class_loader->GetClass();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001388}
1389
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03001390class CHAOnDeleteUpdateClassVisitor {
1391 public:
1392 explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
1393 : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
1394 pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
1395 self_(Thread::Current()) {}
1396
1397 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
1398 // This class is going to be unloaded. Tell CHA about it.
1399 cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
1400 return true;
1401 }
1402 private:
1403 const LinearAlloc* allocator_;
1404 const ClassHierarchyAnalysis* cha_;
1405 const PointerSize pointer_size_;
1406 const Thread* self_;
1407};
1408
Chris Wailes0c61be42018-09-26 17:27:34 -07001409/*
Vladimir Marko8e05f092019-06-10 11:10:38 +01001410 * A visitor used to ensure that all references to strings interned in an AppImage have been
1411 * properly recorded in the interned references list. It is only ever run in debug mode.
Chris Wailes0c61be42018-09-26 17:27:34 -07001412 */
Vladimir Marko8e05f092019-06-10 11:10:38 +01001413class CountInternedStringReferencesVisitor {
Chang Xingba17dbd2017-06-28 21:27:56 +00001414 public:
Vladimir Marko8e05f092019-06-10 11:10:38 +01001415 CountInternedStringReferencesVisitor(const gc::space::ImageSpace& space,
1416 const InternTable::UnorderedSet& image_interns)
1417 : space_(space),
1418 image_interns_(image_interns),
1419 count_(0u) {}
Chris Wailes0c61be42018-09-26 17:27:34 -07001420
Chris Wailes0c61be42018-09-26 17:27:34 -07001421 void TestObject(ObjPtr<mirror::Object> referred_obj) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001422 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001423 if (referred_obj != nullptr &&
1424 space_.HasAddress(referred_obj.Ptr()) &&
1425 referred_obj->IsString()) {
1426 ObjPtr<mirror::String> referred_str = referred_obj->AsString();
Vladimir Marko8e05f092019-06-10 11:10:38 +01001427 auto it = image_interns_.find(GcRoot<mirror::String>(referred_str));
1428 if (it != image_interns_.end() && it->Read() == referred_str) {
1429 ++count_;
Chris Wailesfbeef462018-10-19 14:16:35 -07001430 }
Chang Xingba17dbd2017-06-28 21:27:56 +00001431 }
Chang Xingba17dbd2017-06-28 21:27:56 +00001432 }
1433
Chris Wailes0c61be42018-09-26 17:27:34 -07001434 void VisitRootIfNonNull(
Chang Xingba17dbd2017-06-28 21:27:56 +00001435 mirror::CompressedReference<mirror::Object>* root) const
1436 REQUIRES_SHARED(Locks::mutator_lock_) {
1437 if (!root->IsNull()) {
1438 VisitRoot(root);
1439 }
1440 }
1441
Chris Wailes0c61be42018-09-26 17:27:34 -07001442 void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001443 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001444 TestObject(root->AsMirrorPtr());
Chang Xingba17dbd2017-06-28 21:27:56 +00001445 }
1446
1447 // Visit Class Fields
Chris Wailes0c61be42018-09-26 17:27:34 -07001448 void operator()(ObjPtr<mirror::Object> obj,
1449 MemberOffset offset,
1450 bool is_static ATTRIBUTE_UNUSED) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001451 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Marko8e05f092019-06-10 11:10:38 +01001452    // References within the image or across images don't need a read barrier.
1453 ObjPtr<mirror::Object> referred_obj =
1454 obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1455 TestObject(referred_obj);
Chang Xingba17dbd2017-06-28 21:27:56 +00001456 }
1457
1458 void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1459 ObjPtr<mirror::Reference> ref) const
1460 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Vladimir Marko8e05f092019-06-10 11:10:38 +01001461 operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
Chang Xingba17dbd2017-06-28 21:27:56 +00001462 }
1463
Vladimir Marko8e05f092019-06-10 11:10:38 +01001464 size_t GetCount() const {
1465 return count_;
1466 }
1467
1468 private:
Chris Wailes0c61be42018-09-26 17:27:34 -07001469 const gc::space::ImageSpace& space_;
Vladimir Marko8e05f092019-06-10 11:10:38 +01001470 const InternTable::UnorderedSet& image_interns_;
1471 mutable size_t count_; // Modified from the `const` callbacks.
Chang Xingba17dbd2017-06-28 21:27:56 +00001472};
1473
Chris Wailes0c61be42018-09-26 17:27:34 -07001474/*
Vladimir Marko8e05f092019-06-10 11:10:38 +01001475 * This function counts references to strings interned in the AppImage.
1476 * It is used in debug builds to check against the number of recorded references.
Chris Wailes0c61be42018-09-26 17:27:34 -07001477 */
Vladimir Marko8e05f092019-06-10 11:10:38 +01001478size_t CountInternedStringReferences(gc::space::ImageSpace& space,
1479 const InternTable::UnorderedSet& image_interns)
1480 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001481 const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
1482 const ImageHeader& image_header = space.GetImageHeader();
1483 const uint8_t* target_base = space.GetMemMap()->Begin();
1484 const ImageSection& objects_section = image_header.GetObjectsSection();
Chris Wailesfbeef462018-10-19 14:16:35 -07001485
1486 auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
1487 auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
Chris Wailes0c61be42018-09-26 17:27:34 -07001488
Vladimir Marko8e05f092019-06-10 11:10:38 +01001489 CountInternedStringReferencesVisitor visitor(space, image_interns);
Chris Wailes0c61be42018-09-26 17:27:34 -07001490 bitmap->VisitMarkedRange(objects_begin,
1491 objects_end,
1492 [&space, &visitor](mirror::Object* obj)
1493 REQUIRES_SHARED(Locks::mutator_lock_) {
1494 if (space.HasAddress(obj)) {
1495 if (obj->IsDexCache()) {
Chris Wailesfbeef462018-10-19 14:16:35 -07001496 obj->VisitReferences</* kVisitNativeRoots= */ true,
1497 kVerifyNone,
1498 kWithoutReadBarrier>(visitor, visitor);
Chris Wailes0c61be42018-09-26 17:27:34 -07001499 } else {
1500 // Don't visit native roots for non-dex-cache as they can't contain
1501 // native references to strings. This is verified during compilation
1502 // by ImageWriter::VerifyNativeGCRootInvariants.
Chris Wailesfbeef462018-10-19 14:16:35 -07001503 obj->VisitReferences</* kVisitNativeRoots= */ false,
1504 kVerifyNone,
1505 kWithoutReadBarrier>(visitor, visitor);
Chris Wailes0c61be42018-09-26 17:27:34 -07001506 }
1507 }
1508 });
Vladimir Marko8e05f092019-06-10 11:10:38 +01001509 return visitor.GetCount();
1510}
1511
1512template <typename Visitor>
1513static void VisitInternedStringReferences(
1514 gc::space::ImageSpace* space,
1515 bool use_preresolved_strings,
1516 const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
1517 const uint8_t* target_base = space->Begin();
1518 const ImageSection& sro_section =
1519 space->GetImageHeader().GetImageStringReferenceOffsetsSection();
1520 const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);
1521
1522 VLOG(image)
1523 << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
1524 << num_string_offsets;
1525
1526 const auto* sro_base =
1527 reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());
1528
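  // Each entry is a (base_offset, second) pair. Tag bits on base_offset select one of the three
  // cases handled below: a DexCache string slot, a DexCache pre-resolved string slot, or a plain
  // object field identified by a member offset.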
1529 for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
1530 uint32_t base_offset = sro_base[offset_index].first;
1531
1532 if (HasDexCacheStringNativeRefTag(base_offset)) {
1533 base_offset = ClearDexCacheNativeRefTags(base_offset);
1534 DCHECK_ALIGNED(base_offset, 2);
1535
1536 ObjPtr<mirror::DexCache> dex_cache =
1537 reinterpret_cast<mirror::DexCache*>(space->Begin() + base_offset);
1538 uint32_t string_slot_index = sro_base[offset_index].second;
1539
1540 mirror::StringDexCachePair source =
1541 dex_cache->GetStrings()[string_slot_index].load(std::memory_order_relaxed);
1542 ObjPtr<mirror::String> referred_string = source.object.Read();
1543 DCHECK(referred_string != nullptr);
1544
1545 ObjPtr<mirror::String> visited = visitor(referred_string);
1546 if (visited != referred_string) {
1547 // Because we are not using a helper function we need to mark the GC card manually.
1548 WriteBarrier::ForEveryFieldWrite(dex_cache);
1549 dex_cache->GetStrings()[string_slot_index].store(
1550 mirror::StringDexCachePair(visited, source.index), std::memory_order_relaxed);
1551 }
1552 } else if (HasDexCachePreResolvedStringNativeRefTag(base_offset)) {
1553 if (use_preresolved_strings) {
1554 base_offset = ClearDexCacheNativeRefTags(base_offset);
1555 DCHECK_ALIGNED(base_offset, 2);
1556
1557 ObjPtr<mirror::DexCache> dex_cache =
1558 reinterpret_cast<mirror::DexCache*>(space->Begin() + base_offset);
1559 uint32_t string_index = sro_base[offset_index].second;
1560
Mathieu Chartier77f84fc2019-06-14 12:49:54 -07001561 GcRoot<mirror::String>* preresolved_strings =
1562 dex_cache->GetPreResolvedStrings();
1563 // Handle calls to ClearPreResolvedStrings that might occur concurrently by the profile
1564 // saver that runs shortly after startup. In case the strings are cleared, there is nothing
1565 // to fix up.
1566 if (preresolved_strings != nullptr) {
1567 ObjPtr<mirror::String> referred_string =
1568 preresolved_strings[string_index].Read();
1569 if (referred_string != nullptr) {
1570 ObjPtr<mirror::String> visited = visitor(referred_string);
1571 if (visited != referred_string) {
1572 // Because we are not using a helper function we need to mark the GC card manually.
1573 WriteBarrier::ForEveryFieldWrite(dex_cache);
1574 preresolved_strings[string_index] = GcRoot<mirror::String>(visited);
1575 }
1576 }
Vladimir Marko8e05f092019-06-10 11:10:38 +01001577 }
1578 }
1579 } else {
1580 uint32_t raw_member_offset = sro_base[offset_index].second;
1581 DCHECK_ALIGNED(base_offset, 2);
1582 DCHECK_ALIGNED(raw_member_offset, 2);
1583
1584 ObjPtr<mirror::Object> obj_ptr =
1585 reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
1586 MemberOffset member_offset(raw_member_offset);
1587 ObjPtr<mirror::String> referred_string =
1588 obj_ptr->GetFieldObject<mirror::String,
1589 kVerifyNone,
1590 kWithoutReadBarrier,
1591 /* kIsVolatile= */ false>(member_offset);
1592 DCHECK(referred_string != nullptr);
1593
1594 ObjPtr<mirror::String> visited = visitor(referred_string);
1595 if (visited != referred_string) {
1596 obj_ptr->SetFieldObject</* kTransactionActive= */ false,
1597 /* kCheckTransaction= */ false,
1598 kVerifyNone,
1599 /* kIsVolatile= */ false>(member_offset, visited);
1600 }
1601 }
1602 }
1603}
1604
1605static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
1606 REQUIRES_SHARED(Locks::mutator_lock_) {
1607 InternTable::UnorderedSet image_interns;
1608 const ImageSection& section = space->GetImageHeader().GetInternedStringsSection();
1609 if (section.Size() > 0) {
1610 size_t read_count;
1611 const uint8_t* data = space->Begin() + section.Offset();
1612 InternTable::UnorderedSet image_set(data, /*make_copy_of_data=*/ false, &read_count);
1613 image_set.swap(image_interns);
1614 }
1615 size_t num_recorded_refs = 0u;
1616 VisitInternedStringReferences(
1617 space,
1618 /*use_preresolved_strings=*/ true,
1619 [&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
1620 REQUIRES_SHARED(Locks::mutator_lock_) {
1621 auto it = image_interns.find(GcRoot<mirror::String>(str));
1622 CHECK(it != image_interns.end());
1623 CHECK(it->Read() == str);
1624 ++num_recorded_refs;
1625 return str;
1626 });
1627 size_t num_found_refs = CountInternedStringReferences(*space, image_interns);
1628 CHECK_EQ(num_recorded_refs, num_found_refs);
Chris Wailes0c61be42018-09-26 17:27:34 -07001629}
1630
Andreas Gampe2af99022017-04-25 08:32:59 -07001631// new_class_set is the set of classes that were read from the class table section in the image.
1632// If there was no class table section, it is null.
1633// Note: using a class here to avoid having to make ClassLinker internals public.
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001634class AppImageLoadingHelper {
Andreas Gampe2af99022017-04-25 08:32:59 -07001635 public:
Vladimir Marko0f3c7002017-09-07 14:15:56 +01001636 static void Update(
Andreas Gampe2af99022017-04-25 08:32:59 -07001637 ClassLinker* class_linker,
1638 gc::space::ImageSpace* space,
1639 Handle<mirror::ClassLoader> class_loader,
1640 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
Vladimir Marko0f3c7002017-09-07 14:15:56 +01001641 ClassTable::ClassSet* new_class_set)
Andreas Gampe2af99022017-04-25 08:32:59 -07001642 REQUIRES(!Locks::dex_lock_)
1643 REQUIRES_SHARED(Locks::mutator_lock_);
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001644
Chris Wailesfbeef462018-10-19 14:16:35 -07001645 static void HandleAppImageStrings(gc::space::ImageSpace* space)
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001646 REQUIRES_SHARED(Locks::mutator_lock_);
Andreas Gampe2af99022017-04-25 08:32:59 -07001647};
1648
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001649void AppImageLoadingHelper::Update(
Andreas Gampe2af99022017-04-25 08:32:59 -07001650 ClassLinker* class_linker,
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001651 gc::space::ImageSpace* space,
1652 Handle<mirror::ClassLoader> class_loader,
1653 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
Vladimir Marko0f3c7002017-09-07 14:15:56 +01001654 ClassTable::ClassSet* new_class_set)
Andreas Gampe2af99022017-04-25 08:32:59 -07001655 REQUIRES(!Locks::dex_lock_)
1656 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes23866362018-08-22 16:16:58 -07001657 ScopedTrace app_image_timing("AppImage:Updating");
1658
Vladimir Marko8e05f092019-06-10 11:10:38 +01001659 if (kIsDebugBuild && ClassLinker::kAppImageMayContainStrings) {
1660 // In debug build, verify the string references before applying
1661 // the Runtime::LoadAppImageStartupCache() option.
1662 VerifyInternedStringReferences(space);
1663 }
1664
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001665 Thread* const self = Thread::Current();
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001666 Runtime* const runtime = Runtime::Current();
1667 gc::Heap* const heap = runtime->GetHeap();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001668 const ImageHeader& header = space->GetImageHeader();
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001669 bool load_app_image_startup_cache = runtime->LoadAppImageStartupCache();
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001670 {
Vladimir Marko0f3c7002017-09-07 14:15:56 +01001671 // Register dex caches with the class loader.
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001672 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Alex Lighta9bbc082019-11-14 14:51:41 -08001673 for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001674 const DexFile* const dex_file = dex_cache->GetDexFile();
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001675 {
Andreas Gampecc1b5352016-12-01 16:58:38 -08001676 WriterMutexLock mu2(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08001677 CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
Andreas Gampe2af99022017-04-25 08:32:59 -07001678 class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001679 }
Chris Wailes0c61be42018-09-26 17:27:34 -07001680
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001681 if (!load_app_image_startup_cache) {
1682 dex_cache->ClearPreResolvedStrings();
1683 }
1684
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001685 if (kIsDebugBuild) {
Vladimir Marko1a1de672016-10-13 12:53:15 +01001686 CHECK(new_class_set != nullptr);
Vladimir Marko8d6768d2017-03-14 10:13:21 +00001687 mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
Vladimir Marko1a1de672016-10-13 12:53:15 +01001688 const size_t num_types = dex_cache->NumResolvedTypes();
Vladimir Marko8d6768d2017-03-14 10:13:21 +00001689 for (size_t j = 0; j != num_types; ++j) {
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001690 // The image space is not yet added to the heap, avoid read barriers.
Vladimir Marko8d6768d2017-03-14 10:13:21 +00001691 ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
Chris Wailes0c61be42018-09-26 17:27:34 -07001692
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001693 if (space->HasAddress(klass.Ptr())) {
Vladimir Marko72ab6842017-01-20 19:32:50 +00001694 DCHECK(!klass->IsErroneous()) << klass->GetStatus();
Vladimir Marko54159c62018-06-20 14:30:08 +01001695 auto it = new_class_set->find(ClassTable::TableSlot(klass));
Vladimir Marko1a1de672016-10-13 12:53:15 +01001696 DCHECK(it != new_class_set->end());
1697 DCHECK_EQ(it->Read(), klass);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001698 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Chris Wailes0c61be42018-09-26 17:27:34 -07001699
Vladimir Marko1a1de672016-10-13 12:53:15 +01001700 if (super_class != nullptr && !heap->ObjectIsInBootImageSpace(super_class)) {
Vladimir Marko54159c62018-06-20 14:30:08 +01001701 auto it2 = new_class_set->find(ClassTable::TableSlot(super_class));
Vladimir Marko1a1de672016-10-13 12:53:15 +01001702 DCHECK(it2 != new_class_set->end());
1703 DCHECK_EQ(it2->Read(), super_class);
1704 }
Chris Wailes0c61be42018-09-26 17:27:34 -07001705
Vladimir Marko1a1de672016-10-13 12:53:15 +01001706 for (ArtMethod& m : klass->GetDirectMethods(kRuntimePointerSize)) {
1707 const void* code = m.GetEntryPointFromQuickCompiledCode();
Alex Lightfc49fec2018-01-16 22:28:36 +00001708 const void* oat_code = m.IsInvokable() ? class_linker->GetQuickOatCodeFor(&m) : code;
1709 if (!class_linker->IsQuickResolutionStub(code) &&
1710 !class_linker->IsQuickGenericJniStub(code) &&
Andreas Gampe2af99022017-04-25 08:32:59 -07001711 !class_linker->IsQuickToInterpreterBridge(code) &&
Alex Lightfc49fec2018-01-16 22:28:36 +00001712 !m.IsNative()) {
1713 DCHECK_EQ(code, oat_code) << m.PrettyMethod();
Mathieu Chartier69731002016-03-02 16:08:31 -08001714 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001715 }
Chris Wailes0c61be42018-09-26 17:27:34 -07001716
Vladimir Marko1a1de672016-10-13 12:53:15 +01001717 for (ArtMethod& m : klass->GetVirtualMethods(kRuntimePointerSize)) {
1718 const void* code = m.GetEntryPointFromQuickCompiledCode();
Alex Lightfc49fec2018-01-16 22:28:36 +00001719 const void* oat_code = m.IsInvokable() ? class_linker->GetQuickOatCodeFor(&m) : code;
1720 if (!class_linker->IsQuickResolutionStub(code) &&
1721 !class_linker->IsQuickGenericJniStub(code) &&
Andreas Gampe2af99022017-04-25 08:32:59 -07001722 !class_linker->IsQuickToInterpreterBridge(code) &&
Alex Lightfc49fec2018-01-16 22:28:36 +00001723 !m.IsNative()) {
1724 DCHECK_EQ(code, oat_code) << m.PrettyMethod();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001725 }
1726 }
1727 }
1728 }
1729 }
1730 }
Mathieu Chartiera0b95212016-03-07 16:13:54 -08001731 }
Chris Wailes0c61be42018-09-26 17:27:34 -07001732
Mathieu Chartier0933cc52018-03-23 14:25:08 -07001733 if (ClassLinker::kAppImageMayContainStrings) {
Chris Wailesfbeef462018-10-19 14:16:35 -07001734 HandleAppImageStrings(space);
Chang Xingba17dbd2017-06-28 21:27:56 +00001735 }
Chris Wailes0c61be42018-09-26 17:27:34 -07001736
Mathieu Chartiera0b95212016-03-07 16:13:54 -08001737 if (kVerifyArtMethodDeclaringClasses) {
Chris Wailes23866362018-08-22 16:16:58 -07001738 ScopedTrace timing("AppImage:VerifyDeclaringClasses");
Mathieu Chartiera0b95212016-03-07 16:13:54 -08001739 ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07001740 gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
1741 header.VisitPackedArtMethods([&](ArtMethod& method)
1742 REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
1743 ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
1744 if (klass != nullptr) {
1745 CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
1746 }
1747 }, space->Begin(), kRuntimePointerSize);
Mathieu Chartier03c1dd92016-03-07 16:13:54 -08001748 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001749}
1750
Chris Wailesfbeef462018-10-19 14:16:35 -07001751void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001752 // Iterate over the string reference offsets stored in the image and intern
1753 // the strings they point to.
1754 ScopedTrace timing("AppImage:InternString");
1755
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001756 Runtime* const runtime = Runtime::Current();
1757 InternTable* const intern_table = runtime->GetInternTable();
1758
1759 const bool load_startup_cache = runtime->LoadAppImageStartupCache();
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001760
1761 // Add the intern table, removing any conflicts. For conflicts, store the new address in a map
1762 // for faster lookup.
1763 // TODO: Optimize with a bitmap or bloom filter
1764 SafeMap<mirror::String*, mirror::String*> intern_remap;
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001765 auto func = [&](InternTable::UnorderedSet& interns)
Mathieu Chartier41c08082018-10-31 11:50:26 -07001766 REQUIRES_SHARED(Locks::mutator_lock_)
1767 REQUIRES(Locks::intern_table_lock_) {
Mathieu Chartier8fc75582018-11-01 14:21:33 -07001768 const size_t non_boot_image_strings = intern_table->CountInterns(
1769 /*visit_boot_images=*/false,
1770 /*visit_non_boot_images=*/true);
Chris Wailesfbeef462018-10-19 14:16:35 -07001771 VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
Mathieu Chartier8fc75582018-11-01 14:21:33 -07001772 VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
1773 // Visit the smaller of the two sets to compute the intersection.
1774 if (interns.size() < non_boot_image_strings) {
1775 for (auto it = interns.begin(); it != interns.end(); ) {
1776 ObjPtr<mirror::String> string = it->Read();
1777 ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
1778 if (existing == nullptr) {
1779 existing = intern_table->LookupStrongLocked(string);
1780 }
1781 if (existing != nullptr) {
1782 intern_remap.Put(string.Ptr(), existing.Ptr());
1783 it = interns.erase(it);
1784 } else {
1785 ++it;
1786 }
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001787 }
Mathieu Chartier8fc75582018-11-01 14:21:33 -07001788 } else {
1789 intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
1790 REQUIRES_SHARED(Locks::mutator_lock_)
1791 REQUIRES(Locks::intern_table_lock_) {
1792 auto it = interns.find(root);
1793 if (it != interns.end()) {
1794 ObjPtr<mirror::String> existing = root.Read();
1795 intern_remap.Put(it->Read(), existing.Ptr());
1796 it = interns.erase(it);
1797 }
1798 }, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
1799 }
1800    // Sanity check: strings still left in the image set must not already be interned by the runtime.
1801 if (kIsDebugBuild) {
1802 for (GcRoot<mirror::String>& root : interns) {
1803 ObjPtr<mirror::String> string = root.Read();
1804 CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
1805 CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001806 }
1807 }
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001808 };
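  // Note: per the REQUIRES annotations on |func| above, AddImageStringsToTable() is expected to
  // invoke it with intern_table_lock_ held and the mutator lock shared-held.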
Vladimir Marko8e05f092019-06-10 11:10:38 +01001809 intern_table->AddImageStringsToTable(space, func);
1810 if (!intern_remap.empty()) {
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001811 VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
Vladimir Marko8e05f092019-06-10 11:10:38 +01001812 VisitInternedStringReferences(
1813 space,
1814 load_startup_cache,
1815 [&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
1816 auto it = intern_remap.find(str.Ptr());
1817 if (it != intern_remap.end()) {
1818 return ObjPtr<mirror::String>(it->second);
1819 }
1820 return str;
1821 });
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001822 }
1823}
1824
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001825static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
1826 const char* location,
1827 std::string* error_msg)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001828 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001829 DCHECK(error_msg != nullptr);
1830 std::unique_ptr<const DexFile> dex_file;
Andreas Gampeb40d3612018-06-26 15:49:42 -07001831 const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, nullptr, error_msg);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001832 if (oat_dex_file == nullptr) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001833 return std::unique_ptr<const DexFile>();
1834 }
1835 std::string inner_error_msg;
1836 dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
1837 if (dex_file == nullptr) {
1838 *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
1839 location,
1840 oat_file->GetLocation().c_str(),
1841 inner_error_msg.c_str());
1842 return std::unique_ptr<const DexFile>();
1843 }
1844
1845 if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
1846 *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
1847 location,
1848 dex_file->GetLocationChecksum(),
1849 oat_dex_file->GetDexFileLocationChecksum());
1850 return std::unique_ptr<const DexFile>();
1851 }
1852 return dex_file;
1853}
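// Note: the helper above is used by OpenImageDexFiles() and AddImageSpace() below to re-open the
// dex files embedded in an image's oat file and to verify their location checksums.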
1854
1855bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
1856 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1857 std::string* error_msg) {
Mathieu Chartier268764d2016-09-13 12:09:38 -07001858 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001859 const ImageHeader& header = space->GetImageHeader();
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001860 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001861 DCHECK(dex_caches_object != nullptr);
Vladimir Marko4617d582019-03-28 13:48:31 +00001862 ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001863 dex_caches_object->AsObjectArray<mirror::DexCache>();
1864 const OatFile* oat_file = space->GetOatFile();
Alex Lighta9bbc082019-11-14 14:51:41 -08001865 for (auto dex_cache : dex_caches->Iterate()) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001866 std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
1867 std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1868 dex_file_location.c_str(),
1869 error_msg);
1870 if (dex_file == nullptr) {
1871 return false;
1872 }
1873 dex_cache->SetDexFile(dex_file.get());
1874 out_dex_files->push_back(std::move(dex_file));
1875 }
1876 return true;
1877}
1878
Andreas Gampe0793bec2016-12-01 11:37:33 -08001879// Helper class for ArtMethod checks when adding an image. Keeps all required functionality
1880// together and caches some intermediate results.
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01001881class ImageSanityChecks final {
Andreas Gampe0793bec2016-12-01 11:37:33 -08001882 public:
1883 static void CheckObjects(gc::Heap* heap, ClassLinker* class_linker)
1884 REQUIRES_SHARED(Locks::mutator_lock_) {
1885 ImageSanityChecks isc(heap, class_linker);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001886 auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1887 DCHECK(obj != nullptr);
1888 CHECK(obj->GetClass() != nullptr) << "Null class in object " << obj;
1889 CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj;
1890 if (obj->IsClass()) {
1891 auto klass = obj->AsClass();
1892 for (ArtField& field : klass->GetIFields()) {
1893 CHECK_EQ(field.GetDeclaringClass(), klass);
1894 }
1895 for (ArtField& field : klass->GetSFields()) {
1896 CHECK_EQ(field.GetDeclaringClass(), klass);
1897 }
Vladimir Markoc524e9e2019-03-26 10:54:50 +00001898 const PointerSize pointer_size = isc.pointer_size_;
1899 for (ArtMethod& m : klass->GetMethods(pointer_size)) {
Andreas Gampe1c158a02017-07-13 17:26:19 -07001900 isc.SanityCheckArtMethod(&m, klass);
1901 }
Vladimir Markoc524e9e2019-03-26 10:54:50 +00001902 ObjPtr<mirror::PointerArray> vtable = klass->GetVTable();
Andreas Gampe1c158a02017-07-13 17:26:19 -07001903 if (vtable != nullptr) {
1904 isc.SanityCheckArtMethodPointerArray(vtable, nullptr);
1905 }
1906 if (klass->ShouldHaveImt()) {
1907 ImTable* imt = klass->GetImt(pointer_size);
1908 for (size_t i = 0; i < ImTable::kSize; ++i) {
1909 isc.SanityCheckArtMethod(imt->Get(i, pointer_size), nullptr);
1910 }
1911 }
1912 if (klass->ShouldHaveEmbeddedVTable()) {
1913 for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
1914 isc.SanityCheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr);
1915 }
1916 }
Vladimir Markoc524e9e2019-03-26 10:54:50 +00001917 ObjPtr<mirror::IfTable> iftable = klass->GetIfTable();
Andreas Gampe1c158a02017-07-13 17:26:19 -07001918 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
1919 if (iftable->GetMethodArrayCount(i) > 0) {
1920 isc.SanityCheckArtMethodPointerArray(iftable->GetMethodArray(i), nullptr);
1921 }
1922 }
1923 }
1924 };
1925 heap->VisitObjects(visitor);
Andreas Gampe0793bec2016-12-01 11:37:33 -08001926 }
1927
Vladimir Marko07bfbac2017-07-06 14:55:02 +01001928 static void CheckArtMethodDexCacheArray(gc::Heap* heap,
1929 ClassLinker* class_linker,
1930 mirror::MethodDexCacheType* arr,
1931 size_t size)
Andreas Gampe0793bec2016-12-01 11:37:33 -08001932 REQUIRES_SHARED(Locks::mutator_lock_) {
1933 ImageSanityChecks isc(heap, class_linker);
Vladimir Marko07bfbac2017-07-06 14:55:02 +01001934 isc.SanityCheckArtMethodDexCacheArray(arr, size);
Andreas Gampe0793bec2016-12-01 11:37:33 -08001935 }
1936
Andreas Gampe0793bec2016-12-01 11:37:33 -08001937 private:
1938 ImageSanityChecks(gc::Heap* heap, ClassLinker* class_linker)
1939 : spaces_(heap->GetBootImageSpaces()),
1940 pointer_size_(class_linker->GetImagePointerSize()) {
1941 space_begin_.reserve(spaces_.size());
1942 method_sections_.reserve(spaces_.size());
1943 runtime_method_sections_.reserve(spaces_.size());
1944 for (gc::space::ImageSpace* space : spaces_) {
1945 space_begin_.push_back(space->Begin());
1946 auto& header = space->GetImageHeader();
1947 method_sections_.push_back(&header.GetMethodsSection());
1948 runtime_method_sections_.push_back(&header.GetRuntimeMethodsSection());
1949 }
1950 }
1951
1952 void SanityCheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
1953 REQUIRES_SHARED(Locks::mutator_lock_) {
1954 if (m->IsRuntimeMethod()) {
1955 ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked();
1956 CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
1957 } else if (m->IsCopied()) {
1958 CHECK(m->GetDeclaringClass() != nullptr) << m->PrettyMethod();
1959 } else if (expected_class != nullptr) {
1960 CHECK_EQ(m->GetDeclaringClassUnchecked(), expected_class) << m->PrettyMethod();
1961 }
1962 if (!spaces_.empty()) {
1963 bool contains = false;
1964 for (size_t i = 0; !contains && i != space_begin_.size(); ++i) {
1965 const size_t offset = reinterpret_cast<uint8_t*>(m) - space_begin_[i];
1966 contains = method_sections_[i]->Contains(offset) ||
1967 runtime_method_sections_[i]->Contains(offset);
1968 }
1969 CHECK(contains) << m << " not found";
1970 }
1971 }
1972
1973 void SanityCheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr,
1974 ObjPtr<mirror::Class> expected_class)
1975 REQUIRES_SHARED(Locks::mutator_lock_) {
1976 CHECK(arr != nullptr);
1977 for (int32_t j = 0; j < arr->GetLength(); ++j) {
1978 auto* method = arr->GetElementPtrSize<ArtMethod*>(j, pointer_size_);
1979 // expected_class == null means we are a dex cache.
1980 if (expected_class != nullptr) {
1981 CHECK(method != nullptr);
1982 }
1983 if (method != nullptr) {
1984 SanityCheckArtMethod(method, expected_class);
1985 }
1986 }
1987 }
1988
Vladimir Marko07bfbac2017-07-06 14:55:02 +01001989 void SanityCheckArtMethodDexCacheArray(mirror::MethodDexCacheType* arr, size_t size)
Andreas Gampe0793bec2016-12-01 11:37:33 -08001990 REQUIRES_SHARED(Locks::mutator_lock_) {
1991 CHECK_EQ(arr != nullptr, size != 0u);
1992 if (arr != nullptr) {
1993 bool contains = false;
1994 for (auto space : spaces_) {
1995 auto offset = reinterpret_cast<uint8_t*>(arr) - space->Begin();
Vladimir Markocd87c3e2017-09-05 13:11:57 +01001996 if (space->GetImageHeader().GetDexCacheArraysSection().Contains(offset)) {
Andreas Gampe0793bec2016-12-01 11:37:33 -08001997 contains = true;
1998 break;
1999 }
2000 }
2001 CHECK(contains);
2002 }
2003 for (size_t j = 0; j < size; ++j) {
Vladimir Marko07bfbac2017-07-06 14:55:02 +01002004 auto pair = mirror::DexCache::GetNativePairPtrSize(arr, j, pointer_size_);
2005 ArtMethod* method = pair.object;
Andreas Gampe0793bec2016-12-01 11:37:33 -08002006 // expected_class == null means we are a dex cache.
2007 if (method != nullptr) {
2008 SanityCheckArtMethod(method, nullptr);
2009 }
2010 }
2011 }
2012
2013 const std::vector<gc::space::ImageSpace*>& spaces_;
2014 const PointerSize pointer_size_;
2015
2016 // Cached sections from the spaces.
2017 std::vector<const uint8_t*> space_begin_;
2018 std::vector<const ImageSection*> method_sections_;
2019 std::vector<const ImageSection*> runtime_method_sections_;
2020};
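// Note: ImageSanityChecks is only exercised from the kSanityCheckObjects paths in AddImageSpace()
// below, e.g. (illustrative only):
//   ImageSanityChecks::CheckObjects(Runtime::Current()->GetHeap(), class_linker);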
2021
Andreas Gampebe7af222017-07-25 09:57:28 -07002022static void VerifyAppImage(const ImageHeader& header,
2023 const Handle<mirror::ClassLoader>& class_loader,
2024 const Handle<mirror::ObjectArray<mirror::DexCache> >& dex_caches,
2025 ClassTable* class_table, gc::space::ImageSpace* space)
2026 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07002027 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2028 ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
2029 if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
2030 CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
2031 << mirror::Class::PrettyClass(klass);
2032 }
2033 }, space->Begin(), kRuntimePointerSize);
Andreas Gampebe7af222017-07-25 09:57:28 -07002034 {
2035 // Verify that all direct interfaces of classes in the class table are also resolved.
2036 std::vector<ObjPtr<mirror::Class>> classes;
2037 auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
2038 REQUIRES_SHARED(Locks::mutator_lock_) {
2039 if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
2040 classes.push_back(klass);
2041 }
2042 return true;
2043 };
2044 class_table->Visit(verify_direct_interfaces_in_table);
2045 Thread* self = Thread::Current();
2046 for (ObjPtr<mirror::Class> klass : classes) {
2047 for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
2048 CHECK(klass->GetDirectInterface(self, klass, i) != nullptr)
2049 << klass->PrettyDescriptor() << " iface #" << i;
2050 }
2051 }
2052 }
2053 // Check that all non-primitive classes in dex caches are also in the class table.
Alex Lighta9bbc082019-11-14 14:51:41 -08002054 for (auto dex_cache : dex_caches.ConstIterate<mirror::DexCache>()) {
Andreas Gampebe7af222017-07-25 09:57:28 -07002055 mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
2056 for (int32_t j = 0, num_types = dex_cache->NumResolvedTypes(); j < num_types; j++) {
2057 ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
2058 if (klass != nullptr && !klass->IsPrimitive()) {
2059 CHECK(class_table->Contains(klass))
2060 << klass->PrettyDescriptor() << " " << dex_cache->GetDexFile()->GetLocation();
2061 }
2062 }
2063 }
2064}
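// Note: VerifyAppImage() above is only invoked from AddImageSpace() below, for app images and
// only when kIsDebugBuild is set (see the "AppImage:Verify" trace block).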
2065
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002066bool ClassLinker::AddImageSpace(
2067 gc::space::ImageSpace* space,
2068 Handle<mirror::ClassLoader> class_loader,
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002069 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
2070 std::string* error_msg) {
2071 DCHECK(out_dex_files != nullptr);
2072 DCHECK(error_msg != nullptr);
2073 const uint64_t start_time = NanoTime();
Andreas Gampefa4333d2017-02-14 11:10:34 -08002074 const bool app_image = class_loader != nullptr;
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002075 const ImageHeader& header = space->GetImageHeader();
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002076 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002077 DCHECK(dex_caches_object != nullptr);
2078 Runtime* const runtime = Runtime::Current();
2079 gc::Heap* const heap = runtime->GetHeap();
2080 Thread* const self = Thread::Current();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002081 // Check that the image is what we are expecting.
2082 if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
2083 *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
2084 static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
2085 image_pointer_size_);
2086 return false;
2087 }
Vladimir Markoeca3eda2016-11-09 16:26:44 +00002088 size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
2089 if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
2090 *error_msg = StringPrintf("Expected %zu image roots but got %d",
2091 expected_image_roots,
2092 header.GetImageRoots()->GetLength());
2093 return false;
2094 }
2095 StackHandleScope<3> hs(self);
2096 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
2097 hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
2098 Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
2099 header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
Vladimir Markoeca3eda2016-11-09 16:26:44 +00002100 MutableHandle<mirror::ClassLoader> image_class_loader(hs.NewHandle(
Vladimir Markof75613c2018-06-05 12:51:04 +01002101 app_image ? header.GetImageRoot(ImageHeader::kAppImageClassLoader)->AsClassLoader()
2102 : nullptr));
Andreas Gampefa4333d2017-02-14 11:10:34 -08002103 DCHECK(class_roots != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002104 if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002105 *error_msg = StringPrintf("Expected %d class roots but got %d",
2106                              static_cast<int32_t>(ClassRoot::kMax),
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002107                              class_roots->GetLength());
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002108 return false;
2109 }
2110 // Check against existing class roots to make sure they match the ones in the boot image.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002111 ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
2112 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
2113 if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002114 *error_msg = "App image class roots must have pointer equality with runtime ones.";
2115 return false;
2116 }
2117 }
Vladimir Markoeca3eda2016-11-09 16:26:44 +00002118 const OatFile* oat_file = space->GetOatFile();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002119 if (oat_file->GetOatHeader().GetDexFileCount() !=
2120 static_cast<uint32_t>(dex_caches->GetLength())) {
2121 *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from "
2122 "image";
2123 return false;
2124 }
2125
Alex Lighta9bbc082019-11-14 14:51:41 -08002126 for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
David Brazdil3e8aae02019-03-26 18:48:02 +00002127 std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08002128 std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
2129 dex_file_location.c_str(),
2130 error_msg);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002131 if (dex_file == nullptr) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002132 return false;
2133 }
2134
2135 if (app_image) {
2136 // The current dex file field is bogus, overwrite it so that we can get the dex file in the
2137 // loop below.
Vladimir Markocd556b02017-02-03 11:47:34 +00002138 dex_cache->SetDexFile(dex_file.get());
Vladimir Marko8d6768d2017-03-14 10:13:21 +00002139 mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
Vladimir Markocd556b02017-02-03 11:47:34 +00002140 for (int32_t j = 0, num_types = dex_cache->NumResolvedTypes(); j < num_types; j++) {
Vladimir Marko8d6768d2017-03-14 10:13:21 +00002141 ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002142 if (klass != nullptr) {
Vladimir Marko72ab6842017-01-20 19:32:50 +00002143 DCHECK(!klass->IsErroneous()) << klass->GetStatus();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002144 }
2145 }
2146 } else {
2147 if (kSanityCheckObjects) {
Vladimir Marko07bfbac2017-07-06 14:55:02 +01002148 ImageSanityChecks::CheckArtMethodDexCacheArray(heap,
2149 this,
2150 dex_cache->GetResolvedMethods(),
2151 dex_cache->NumResolvedMethods());
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002152 }
2153 // Register dex files, keep track of existing ones that are conflicts.
Mathieu Chartier0a19e212019-11-27 14:35:24 -08002154 AppendToBootClassPath(dex_file.get(), dex_cache);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002155 }
2156 out_dex_files->push_back(std::move(dex_file));
2157 }
2158
2159 if (app_image) {
2160 ScopedObjectAccessUnchecked soa(Thread::Current());
Nicolas Geoffrayf0d30022018-11-20 17:45:38 +00002161 ScopedAssertNoThreadSuspension sants("Checking app image", soa.Self());
Vladimir Markoeca3eda2016-11-09 16:26:44 +00002162 if (IsBootClassLoader(soa, image_class_loader.Get())) {
2163 *error_msg = "Unexpected BootClassLoader in app image";
2164 return false;
2165 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002166 }
2167
2168 if (kSanityCheckObjects) {
Alex Lighta9bbc082019-11-14 14:51:41 -08002169 for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002170 for (size_t j = 0; j < dex_cache->NumResolvedFields(); ++j) {
2171 auto* field = dex_cache->GetResolvedField(j, image_pointer_size_);
2172 if (field != nullptr) {
2173 CHECK(field->GetDeclaringClass()->GetClass() != nullptr);
2174 }
2175 }
2176 }
2177 if (!app_image) {
Andreas Gampe0793bec2016-12-01 11:37:33 -08002178 ImageSanityChecks::CheckObjects(heap, this);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002179 }
2180 }
2181
2182 // Set entry point to interpreter if in InterpretOnly mode.
2183 if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) {
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07002184 // Set image methods' entry point to interpreter.
2185 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2186 if (!method.IsRuntimeMethod()) {
2187 DCHECK(method.GetDeclaringClass() != nullptr);
Ulyana Trafimovich3060bb92020-07-16 14:17:11 +00002188 if (!method.IsNative() && !method.IsResolutionMethod()) {
2189 method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
2190 image_pointer_size_);
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07002191 }
2192 }
2193 }, space->Begin(), image_pointer_size_);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002194 }
2195
Nicolas Geoffray7e2c9632020-01-09 13:41:10 +00002196 if (interpreter::CanRuntimeUseNterp()) {
2197 // Set image methods' entry point that point to the interpreter bridge to the nterp entry point.
2198 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2199 if (IsQuickToInterpreterBridge(method.GetEntryPointFromQuickCompiledCode()) &&
2200 interpreter::CanMethodUseNterp(&method)) {
2201 method.SetEntryPointFromQuickCompiledCodePtrSize(interpreter::GetNterpEntryPoint(),
2202 image_pointer_size_);
2203 }
2204 }, space->Begin(), image_pointer_size_);
2205 }
2206
Nicolas Geoffray8c41a0b2020-02-06 16:52:11 +00002207 if (runtime->IsVerificationSoftFail()) {
2208 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2209 if (!method.IsNative() && method.IsInvokable()) {
2210 method.ClearSkipAccessChecks();
2211 }
2212 }, space->Begin(), image_pointer_size_);
2213 }
2214
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08002215 ClassTable* class_table = nullptr;
2216 {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002217 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08002218 class_table = InsertClassTableForClassLoader(class_loader.Get());
Mathieu Chartier69731002016-03-02 16:08:31 -08002219 }
2220 // If we have a class table section, read it and use it for verification in
2221 // UpdateAppImageClassLoadersAndDexCaches.
2222 ClassTable::ClassSet temp_set;
Vladimir Marko0f3c7002017-09-07 14:15:56 +01002223 const ImageSection& class_table_section = header.GetClassTableSection();
Mathieu Chartier69731002016-03-02 16:08:31 -08002224 const bool added_class_table = class_table_section.Size() > 0u;
2225 if (added_class_table) {
2226 const uint64_t start_time2 = NanoTime();
2227 size_t read_count = 0;
2228 temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
2229 /*make copy*/false,
2230 &read_count);
Mathieu Chartier69731002016-03-02 16:08:31 -08002231 VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002232 }
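  // Note: when a class table section is present, |temp_set| wraps the section's data in place
  // (see the /*make copy*/false argument above), so its entries point directly into the image;
  // it is merged into the class loader's ClassTable near the end of this function.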
2233 if (app_image) {
Mathieu Chartier74ccee62018-10-10 10:30:29 -07002234 AppImageLoadingHelper::Update(this, space, class_loader, dex_caches, &temp_set);
Mathieu Chartier456b4922018-11-06 10:35:48 -08002235
2236 {
2237 ScopedTrace trace("AppImage:UpdateClassLoaders");
2238 // Update class loader and resolved strings. If added_class_table is false, the resolved
2239      // strings were already forwarded by UpdateAppImageClassLoadersAndDexCaches.
Mathieu Chartierbc1e0fa2018-11-14 16:18:18 -08002240 ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
Mathieu Chartier456b4922018-11-06 10:35:48 -08002241 for (const ClassTable::TableSlot& root : temp_set) {
Mathieu Chartierbc1e0fa2018-11-14 16:18:18 -08002242 // Note: We probably don't need the read barrier unless we copy the app image objects into
2243 // the region space.
2244 ObjPtr<mirror::Class> klass(root.Read());
2245 // Do not update class loader for boot image classes where the app image
2246 // class loader is only the initiating loader but not the defining loader.
2247 // Avoid read barrier since we are comparing against null.
2248 if (klass->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
Vladimir Markob68bb7a2020-03-17 10:55:25 +00002249 klass->SetClassLoader(loader);
Mathieu Chartierbc1e0fa2018-11-14 16:18:18 -08002250 }
Mathieu Chartier456b4922018-11-06 10:35:48 -08002251 }
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08002252 }
Igor Murashkin86083f72017-10-27 10:59:04 -07002253
Vladimir Marko305c38b2018-02-14 11:50:07 +00002254 if (kBitstringSubtypeCheckEnabled) {
Igor Murashkin86083f72017-10-27 10:59:04 -07002255 // Every class in the app image has initially SubtypeCheckInfo in the
2256 // Uninitialized state.
2257 //
2258 // The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least Initialized
2259      // after class initialization is complete. The ClassStatus values in the app image are
2260      // almost all ClassStatus::Initialized, and being in the
2261      // SubtypeCheckInfo::kUninitialized state would violate that invariant.
2262 //
2263      // Force every app image class's SubtypeCheck to be at least kInitialized.
2264 //
2265 // See also ImageWriter::FixupClass.
Chris Wailes23866362018-08-22 16:16:58 -07002266      ScopedTrace trace("AppImage:RecalculateSubtypeCheckBitstrings");
Igor Murashkin86083f72017-10-27 10:59:04 -07002267 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2268 for (const ClassTable::TableSlot& root : temp_set) {
Vladimir Marko38b8b252018-01-02 19:07:06 +00002269 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
Igor Murashkin86083f72017-10-27 10:59:04 -07002270 }
2271 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00002272 }
2273 if (!oat_file->GetBssGcRoots().empty()) {
2274 // Insert oat file to class table for visiting .bss GC roots.
2275 class_table->InsertOatFile(oat_file);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002276 }
Igor Murashkin86083f72017-10-27 10:59:04 -07002277
Mathieu Chartier69731002016-03-02 16:08:31 -08002278 if (added_class_table) {
2279 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2280 class_table->AddClassSet(std::move(temp_set));
2281 }
Andreas Gampebe7af222017-07-25 09:57:28 -07002282
Mathieu Chartier69731002016-03-02 16:08:31 -08002283 if (kIsDebugBuild && app_image) {
2284    // This verification needs to happen after the classes have been added to the class loader,
2285    // since it ensures the classes are in the class table.
Chris Wailes23866362018-08-22 16:16:58 -07002286 ScopedTrace trace("AppImage:Verify");
Andreas Gampebe7af222017-07-25 09:57:28 -07002287 VerifyAppImage(header, class_loader, dex_caches, class_table, space);
Mathieu Chartier69731002016-03-02 16:08:31 -08002288 }
Andreas Gampebe7af222017-07-25 09:57:28 -07002289
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002290 VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
Andreas Gampe3db9c5d2015-11-17 11:52:46 -08002291 return true;
Brian Carlstroma663ea52011-08-19 23:33:41 -07002292}
2293
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002294bool ClassLinker::ClassInClassTable(ObjPtr<mirror::Class> klass) {
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002295 ClassTable* const class_table = ClassTableForClassLoader(klass->GetClassLoader());
2296 return class_table != nullptr && class_table->Contains(klass);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002297}
2298
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07002299void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
Mathieu Chartier7778b882015-10-05 16:41:10 -07002300 // Acquire tracing_enabled before locking class linker lock to prevent lock order violation. Since
2301 // enabling tracing requires the mutator lock, there are no race conditions here.
2302 const bool tracing_enabled = Trace::IsTracingEnabled();
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002303 Thread* const self = Thread::Current();
2304 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Vladimir Marko9b03cb42017-02-16 16:37:03 +00002305 if (kUseReadBarrier) {
2306 // We do not track new roots for CC.
2307 DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
2308 kVisitRootFlagClearRootLog |
2309 kVisitRootFlagStartLoggingNewRoots |
2310 kVisitRootFlagStopLoggingNewRoots));
2311 }
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002312 if ((flags & kVisitRootFlagAllRoots) != 0) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002313 // Argument for how root visiting deals with ArtField and ArtMethod roots.
2314    // There are 3 GC cases to handle:
2315    // Non-moving concurrent:
2316    // This case is easy to handle since the reference members of ArtMethod and ArtField are held
2317    // live by the class and class roots.
Mathieu Chartiere401d142015-04-22 13:56:20 -07002318 //
2319 // Moving non-concurrent:
2320    // This case needs to call VisitNativeRoots in case the classes or dex cache arrays move.
2321    // To prevent missing roots, this case needs to ensure that there are no
2322    // suspend points between the point at which we allocate ArtMethod arrays and the point at
2323    // which we place them in a class that is in the class table.
2324 //
2325 // Moving concurrent:
2326 // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
2327 // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
Mathieu Chartier58c3f6a2016-12-01 14:21:11 -08002328 //
2329 // Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
2330 // ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
2331 // these objects.
2332 UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
Andreas Gampe2af99022017-04-25 08:32:59 -07002333 boot_class_table_->VisitRoots(root_visitor);
Mathieu Chartier7778b882015-10-05 16:41:10 -07002334 // If tracing is enabled, then mark all the class loaders to prevent unloading.
neo.chaea2d1b282016-11-08 08:40:46 +09002335 if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
Mathieu Chartier7778b882015-10-05 16:41:10 -07002336 for (const ClassLoaderData& data : class_loaders_) {
2337 GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
2338 root.VisitRoot(visitor, RootInfo(kRootVMInternal));
2339 }
2340 }
Vladimir Marko9b03cb42017-02-16 16:37:03 +00002341 } else if (!kUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
Mathieu Chartierc2e20622014-11-03 11:41:47 -08002342 for (auto& root : new_class_roots_) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002343 ObjPtr<mirror::Class> old_ref = root.Read<kWithoutReadBarrier>();
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07002344 root.VisitRoot(visitor, RootInfo(kRootStickyClass));
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002345 ObjPtr<mirror::Class> new_ref = root.Read<kWithoutReadBarrier>();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002346 // Concurrent moving GC marked new roots through the to-space invariant.
2347 CHECK_EQ(new_ref, old_ref);
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002348 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00002349 for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
2350 for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
2351 ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
2352 if (old_ref != nullptr) {
2353 DCHECK(old_ref->IsClass());
2354 root.VisitRoot(visitor, RootInfo(kRootStickyClass));
2355 ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
2356 // Concurrent moving GC marked new roots through the to-space invariant.
2357 CHECK_EQ(new_ref, old_ref);
2358 }
2359 }
2360 }
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002361 }
Vladimir Marko9b03cb42017-02-16 16:37:03 +00002362 if (!kUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002363 new_class_roots_.clear();
Vladimir Marko1998cd02017-01-13 13:02:58 +00002364 new_bss_roots_boot_oat_files_.clear();
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002365 }
Vladimir Marko9b03cb42017-02-16 16:37:03 +00002366 if (!kUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00002367 log_new_roots_ = true;
Vladimir Marko9b03cb42017-02-16 16:37:03 +00002368 } else if (!kUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00002369 log_new_roots_ = false;
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002370 }
2371 // We deliberately ignore the class roots in the image since we
2372 // handle image roots by using the MS/CMS rescanning of dirty cards.
2373}
2374
Brian Carlstroma663ea52011-08-19 23:33:41 -07002375// Keep in sync with InitCallback. Anything we visit, we need to
2376// reinit references to when reinitializing a ClassLinker from a
2377// mapped image.
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07002378void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
Mathieu Chartier31000802015-06-14 14:14:37 -07002379 class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07002380 VisitClassRoots(visitor, flags);
Mathieu Chartier6cfc2c02015-10-12 15:06:16 -07002381 // Instead of visiting the find_array_class_cache_ drop it so that it doesn't prevent class
2382 // unloading if we are marking roots.
2383 DropFindArrayClassCache();
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07002384}
2385
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002386class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
2387 public:
2388 explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
2389 : visitor_(visitor),
2390 done_(false) {}
2391
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002392 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002393 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002394 ClassTable* const class_table = class_loader->GetClassTable();
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002395 if (!done_ && class_table != nullptr) {
2396 DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
2397 if (!class_table->Visit(visitor)) {
2398 // If the visitor ClassTable returns false it means that we don't need to continue.
2399 done_ = true;
2400 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002401 }
2402 }
2403
2404 private:
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002405 // Class visitor that limits the class visits from a ClassTable to the classes with
2406 // the provided defining class loader. This filter is used to avoid multiple visits
2407 // of the same class which can be recorded for multiple initiating class loaders.
2408 class DefiningClassLoaderFilterVisitor : public ClassVisitor {
2409 public:
2410 DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
2411 ClassVisitor* visitor)
2412 : defining_class_loader_(defining_class_loader), visitor_(visitor) { }
2413
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002414 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002415 if (klass->GetClassLoader() != defining_class_loader_) {
2416 return true;
2417 }
2418 return (*visitor_)(klass);
2419 }
2420
Vladimir Marko0984e482019-03-27 16:41:41 +00002421 const ObjPtr<mirror::ClassLoader> defining_class_loader_;
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002422 ClassVisitor* const visitor_;
2423 };
2424
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002425 ClassVisitor* const visitor_;
2426 // If done is true then we don't need to do any more visiting.
2427 bool done_;
2428};
2429
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002430void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
Andreas Gampe2af99022017-04-25 08:32:59 -07002431 if (boot_class_table_->Visit(*visitor)) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002432 VisitClassLoaderClassesVisitor loader_visitor(visitor);
2433 VisitClassLoaders(&loader_visitor);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002434 }
2435}
2436
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002437void ClassLinker::VisitClasses(ClassVisitor* visitor) {
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002438 Thread* const self = Thread::Current();
2439 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
2440 // Not safe to have thread suspension when we are holding a lock.
2441 if (self != nullptr) {
Mathieu Chartier268764d2016-09-13 12:09:38 -07002442 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002443 VisitClassesInternal(visitor);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002444 } else {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002445 VisitClassesInternal(visitor);
Elliott Hughesa2155262011-11-16 16:26:58 -08002446 }
2447}
2448
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002449class GetClassesInToVector : public ClassVisitor {
2450 public:
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002451 bool operator()(ObjPtr<mirror::Class> klass) override {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002452 classes_.push_back(klass);
2453 return true;
2454 }
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002455 std::vector<ObjPtr<mirror::Class>> classes_;
Ian Rogersdbf3be02014-08-29 15:40:08 -07002456};
2457
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002458class GetClassInToObjectArray : public ClassVisitor {
2459 public:
2460 explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
2461 : arr_(arr), index_(0) {}
2462
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002463 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002464 ++index_;
2465 if (index_ <= arr_->GetLength()) {
2466 arr_->Set(index_ - 1, klass);
2467 return true;
2468 }
Ian Rogersdbf3be02014-08-29 15:40:08 -07002469 return false;
2470 }
Ian Rogersdbf3be02014-08-29 15:40:08 -07002471
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002472 bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002473 return index_ <= arr_->GetLength();
2474 }
2475
2476 private:
2477 mirror::ObjectArray<mirror::Class>* const arr_;
2478 int32_t index_;
2479};
2480
2481void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
Ian Rogersdbf3be02014-08-29 15:40:08 -07002482 // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
2483 // is avoiding duplicates.
2484 if (!kMovingClasses) {
Mathieu Chartier268764d2016-09-13 12:09:38 -07002485 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002486 GetClassesInToVector accumulator;
2487 VisitClasses(&accumulator);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002488 for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08002489 if (!visitor->operator()(klass)) {
Ian Rogersdbf3be02014-08-29 15:40:08 -07002490 return;
2491 }
2492 }
2493 } else {
Mathieu Chartier268764d2016-09-13 12:09:38 -07002494 Thread* const self = Thread::Current();
Ian Rogersdbf3be02014-08-29 15:40:08 -07002495 StackHandleScope<1> hs(self);
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002496 auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
Ian Rogersdbf3be02014-08-29 15:40:08 -07002497 // We size the array assuming classes won't be added to the class table during the visit.
2498 // If this assumption fails we iterate again.
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002499 while (true) {
Ian Rogersdbf3be02014-08-29 15:40:08 -07002500 size_t class_table_size;
2501 {
Ian Rogers7b078e82014-09-10 14:44:24 -07002502 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002503 // Add 100 in case new classes get loaded when we are filling in the object array.
2504 class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
Ian Rogersdbf3be02014-08-29 15:40:08 -07002505 }
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002506 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Ian Rogersdbf3be02014-08-29 15:40:08 -07002507 classes.Assign(
2508 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
Andreas Gampefa4333d2017-02-14 11:10:34 -08002509 CHECK(classes != nullptr); // OOME.
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002510 GetClassInToObjectArray accumulator(classes.Get());
2511 VisitClasses(&accumulator);
2512 if (accumulator.Succeeded()) {
2513 break;
2514 }
Ian Rogersdbf3be02014-08-29 15:40:08 -07002515 }
2516 for (int32_t i = 0; i < classes->GetLength(); ++i) {
2517      // If the class table shrank during creation of the classes array we expect null elements. If
2518 // the class table grew then the loop repeats. If classes are created after the loop has
2519 // finished then we don't visit.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002520 ObjPtr<mirror::Class> klass = classes->Get(i);
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08002521 if (klass != nullptr && !visitor->operator()(klass)) {
Ian Rogersdbf3be02014-08-29 15:40:08 -07002522 return;
2523 }
Ian Rogers00f7d0e2012-07-19 15:28:27 -07002524 }
2525 }
2526}
2527
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07002528ClassLinker::~ClassLinker() {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002529 Thread* const self = Thread::Current();
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07002530 for (const ClassLoaderData& data : class_loaders_) {
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03002531 // CHA unloading analysis is not needed. No negative consequences are expected because
2532 // all the classloaders are deleted at the same time.
Andreas Gampe98ea9d92018-10-19 14:06:15 -07002533 DeleteClassLoader(self, data, /*cleanup_cha=*/ false);
Mathieu Chartier6b069532015-08-05 15:08:12 -07002534 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002535 class_loaders_.clear();
Vladimir Markobf121912019-06-04 13:49:05 +01002536 while (!running_visibly_initialized_callbacks_.empty()) {
2537 std::unique_ptr<VisiblyInitializedCallback> callback(
2538 std::addressof(running_visibly_initialized_callbacks_.front()));
2539 running_visibly_initialized_callbacks_.pop_front();
2540 }
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07002541}
2542
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03002543void ClassLinker::DeleteClassLoader(Thread* self, const ClassLoaderData& data, bool cleanup_cha) {
Nicolas Geoffray1dad3f62015-10-23 14:59:54 +01002544 Runtime* const runtime = Runtime::Current();
2545 JavaVMExt* const vm = runtime->GetJavaVM();
2546 vm->DeleteWeakGlobalRef(self, data.weak_root);
Calin Juravlee5de54c2016-04-20 14:22:09 +01002547 // Notify the JIT that we need to remove the methods and/or profiling info.
Nicolas Geoffray1dad3f62015-10-23 14:59:54 +01002548 if (runtime->GetJit() != nullptr) {
2549 jit::JitCodeCache* code_cache = runtime->GetJit()->GetCodeCache();
2550 if (code_cache != nullptr) {
Mathieu Chartiercf79cf52017-07-21 11:17:57 -07002551 // For the JIT case, RemoveMethodsIn removes the CHA dependencies.
Nicolas Geoffray1dad3f62015-10-23 14:59:54 +01002552 code_cache->RemoveMethodsIn(self, *data.allocator);
2553 }
Andreas Gampec1ac9ee2017-07-24 22:35:49 -07002554 } else if (cha_ != nullptr) {
Mathieu Chartiercf79cf52017-07-21 11:17:57 -07002555    // If we don't have a JIT, we need to remove the CHA dependencies manually.
Andreas Gampec1ac9ee2017-07-24 22:35:49 -07002556 cha_->RemoveDependenciesForLinearAlloc(data.allocator);
Nicolas Geoffray1dad3f62015-10-23 14:59:54 +01002557 }
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03002558 // Cleanup references to single implementation ArtMethods that will be deleted.
2559 if (cleanup_cha) {
2560 CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
2561 data.class_table->Visit<CHAOnDeleteUpdateClassVisitor, kWithoutReadBarrier>(visitor);
2562 }
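  // Note: the block below drops entries from critical_native_code_with_clinit_check_ whose
  // ArtMethod was allocated in this class loader's LinearAlloc, since those methods are about
  // to be freed together with the allocator.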
Vladimir Marko86c87522020-05-11 16:55:55 +01002563 {
2564 MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
2565 auto end = critical_native_code_with_clinit_check_.end();
2566 for (auto it = critical_native_code_with_clinit_check_.begin(); it != end; ) {
2567 if (data.allocator->ContainsUnsafe(it->first)) {
2568 it = critical_native_code_with_clinit_check_.erase(it);
2569 } else {
2570 ++it;
2571 }
2572 }
2573 }
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03002574
Nicolas Geoffray1dad3f62015-10-23 14:59:54 +01002575 delete data.allocator;
2576 delete data.class_table;
2577}
2578
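// Note: a PointerArray is backed by a LongArray on 64-bit images and an IntArray on 32-bit
// images, so each element is wide enough to hold a native pointer of the image pointer size.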
Vladimir Markobcf17522018-06-01 13:14:32 +01002579ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t length) {
2580 return ObjPtr<mirror::PointerArray>::DownCast(
Andreas Gampe542451c2016-07-26 09:02:02 -07002581 image_pointer_size_ == PointerSize::k64
Vladimir Markobcf17522018-06-01 13:14:32 +01002582 ? ObjPtr<mirror::Array>(mirror::LongArray::Alloc(self, length))
2583 : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
Mathieu Chartiere401d142015-04-22 13:56:20 -07002584}
2585
Vladimir Markobcf17522018-06-01 13:14:32 +01002586ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(/*out*/ ObjPtr<mirror::String>* out_location,
2587 Thread* self,
2588 const DexFile& dex_file) {
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002589 StackHandleScope<1> hs(self);
2590 DCHECK(out_location != nullptr);
Mathieu Chartier28bd2e42016-10-04 13:54:57 -07002591 auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002592 GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
Andreas Gampefa4333d2017-02-14 11:10:34 -08002593 if (dex_cache == nullptr) {
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002594 self->AssertPendingOOMException();
2595 return nullptr;
2596 }
Vladimir Marko31c3daa2019-06-13 12:18:37 +01002597 // Use InternWeak() so that the location String can be collected when the ClassLoader
2598 // with this DexCache is collected.
2599 ObjPtr<mirror::String> location = intern_table_->InternWeak(dex_file.GetLocation().c_str());
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002600 if (location == nullptr) {
2601 self->AssertPendingOOMException();
2602 return nullptr;
2603 }
2604 *out_location = location;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002605 return dex_cache.Get();
Brian Carlstroma0808032011-07-18 00:39:23 -07002606}
2607
Vladimir Markobcf17522018-06-01 13:14:32 +01002608ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(Thread* self,
2609 const DexFile& dex_file,
2610 LinearAlloc* linear_alloc) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002611 ObjPtr<mirror::String> location = nullptr;
2612 ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(&location, self, dex_file);
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002613 if (dex_cache != nullptr) {
Andreas Gampecc1b5352016-12-01 16:58:38 -08002614 WriterMutexLock mu(self, *Locks::dex_lock_);
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002615 DCHECK(location != nullptr);
Andreas Gampecc1b5352016-12-01 16:58:38 -08002616 mirror::DexCache::InitializeDexCache(self,
2617 dex_cache,
2618 location,
2619 &dex_file,
2620 linear_alloc,
2621 image_pointer_size_);
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002622 }
Vladimir Markobcf17522018-06-01 13:14:32 +01002623 return dex_cache;
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002624}
2625
Vladimir Marko70e2a762019-07-12 16:49:00 +01002626template <bool kMovable, typename PreFenceVisitor>
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002627ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2628 ObjPtr<mirror::Class> java_lang_Class,
Vladimir Marko70e2a762019-07-12 16:49:00 +01002629 uint32_t class_size,
2630 const PreFenceVisitor& pre_fence_visitor) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -08002631 DCHECK_GE(class_size, sizeof(mirror::Class));
Ian Rogers1d54e732013-05-02 21:10:01 -07002632 gc::Heap* heap = Runtime::Current()->GetHeap();
Roland Levillain0e840272018-08-23 19:55:30 +01002633 ObjPtr<mirror::Object> k = (kMovingClasses && kMovable) ?
Vladimir Marko70e2a762019-07-12 16:49:00 +01002634 heap->AllocObject(self, java_lang_Class, class_size, pre_fence_visitor) :
2635 heap->AllocNonMovableObject(self, java_lang_Class, class_size, pre_fence_visitor);
Ian Rogers6fac4472014-02-25 17:01:10 -08002636 if (UNLIKELY(k == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002637 self->AssertPendingOOMException();
Ian Rogers6fac4472014-02-25 17:01:10 -08002638 return nullptr;
Ian Rogersa436fde2013-08-27 23:34:06 -07002639 }
Ian Rogers6fac4472014-02-25 17:01:10 -08002640 return k->AsClass();
Brian Carlstrom75cb3b42011-07-28 02:13:36 -07002641}
2642
Vladimir Marko70e2a762019-07-12 16:49:00 +01002643template <bool kMovable>
2644ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2645 ObjPtr<mirror::Class> java_lang_Class,
2646 uint32_t class_size) {
2647 mirror::Class::InitializeClassVisitor visitor(class_size);
2648 return AllocClass<kMovable>(self, java_lang_Class, class_size, visitor);
2649}
2650
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002651ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self, uint32_t class_size) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002652 return AllocClass(self, GetClassRoot<mirror::Class>(this), class_size);
Brian Carlstroma0808032011-07-18 00:39:23 -07002653}
2654
Vladimir Marko70e2a762019-07-12 16:49:00 +01002655void ClassLinker::AllocPrimitiveArrayClass(Thread* self,
2656 ClassRoot primitive_root,
2657 ClassRoot array_root) {
Roland Levillain0e840272018-08-23 19:55:30 +01002658  // We make this class non-movable to guard against the unlikely case where it would be
2659 // moved by a sticky-bit (minor) collection when using the Generational
2660 // Concurrent Copying (CC) collector, potentially creating a stale reference
2661 // in the `klass_` field of one of its instances allocated in the Large-Object
2662 // Space (LOS) -- see the comment about the dirty card scanning logic in
2663 // art::gc::collector::ConcurrentCopying::MarkingPhase.
Vladimir Marko70e2a762019-07-12 16:49:00 +01002664 ObjPtr<mirror::Class> array_class = AllocClass</* kMovable= */ false>(
2665 self, GetClassRoot<mirror::Class>(this), mirror::Array::ClassSize(image_pointer_size_));
2666 ObjPtr<mirror::Class> component_type = GetClassRoot(primitive_root, this);
2667 DCHECK(component_type->IsPrimitive());
2668 array_class->SetComponentType(component_type);
2669 SetClassRoot(array_root, array_class);
2670}
2671
2672void ClassLinker::FinishArrayClassSetup(ObjPtr<mirror::Class> array_class) {
2673 ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
2674 array_class->SetSuperClass(java_lang_Object);
2675 array_class->SetVTable(java_lang_Object->GetVTable());
2676 array_class->SetPrimitiveType(Primitive::kPrimNot);
2677 ObjPtr<mirror::Class> component_type = array_class->GetComponentType();
2678 array_class->SetClassFlags(component_type->IsPrimitive()
2679 ? mirror::kClassFlagNoReferenceFields
2680 : mirror::kClassFlagObjectArray);
2681 array_class->SetClassLoader(component_type->GetClassLoader());
2682 array_class->SetStatusForPrimitiveOrArray(ClassStatus::kLoaded);
2683 array_class->PopulateEmbeddedVTable(image_pointer_size_);
2684 ImTable* object_imt = java_lang_Object->GetImt(image_pointer_size_);
2685 array_class->SetImt(object_imt, image_pointer_size_);
2686 // Skip EnsureSkipAccessChecksMethods(). We can skip the verified status,
2687 // the kAccVerificationAttempted flag is added below, and there are no
2688 // methods that need the kAccSkipAccessChecks flag.
2689 DCHECK_EQ(array_class->NumMethods(), 0u);
2690
2691  // We don't need to call array_class->SetObjectSize(..)
2692  // because Object::SizeOf delegates to Array::SizeOf.
2693
2694 // All arrays have java/lang/Cloneable and java/io/Serializable as
2695 // interfaces. We need to set that up here, so that stuff like
2696 // "instanceof" works right.
2697
2698 // Use the single, global copies of "interfaces" and "iftable"
2699 // (remember not to free them for arrays).
2700 {
2701 ObjPtr<mirror::IfTable> array_iftable = GetArrayIfTable();
2702 CHECK(array_iftable != nullptr);
2703 array_class->SetIfTable(array_iftable);
2704 }
2705
2706 // Inherit access flags from the component type.
2707 int access_flags = component_type->GetAccessFlags();
2708 // Lose any implementation detail flags; in particular, arrays aren't finalizable.
2709 access_flags &= kAccJavaFlagsMask;
2710 // Arrays can't be used as a superclass or interface, so we want to add "abstract final"
2711 // and remove "interface".
2712 access_flags |= kAccAbstract | kAccFinal;
2713 access_flags &= ~kAccInterface;
2714 // Arrays are access-checks-clean and preverified.
2715 access_flags |= kAccVerificationAttempted;
2716
Vladimir Markob68bb7a2020-03-17 10:55:25 +00002717 array_class->SetAccessFlagsDuringLinking(access_flags);
Vladimir Marko70e2a762019-07-12 16:49:00 +01002718
Vladimir Markobf121912019-06-04 13:49:05 +01002719 // Array classes are fully initialized either during single threaded startup,
2720 // or from a pre-fence visitor, so visibly initialized.
2721 array_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
Vladimir Marko70e2a762019-07-12 16:49:00 +01002722}
2723
2724void ClassLinker::FinishCoreArrayClassSetup(ClassRoot array_root) {
2725 // Do not hold lock on the array class object, the initialization of
2726 // core array classes is done while the process is still single threaded.
2727 ObjPtr<mirror::Class> array_class = GetClassRoot(array_root, this);
2728 FinishArrayClassSetup(array_class);
2729
2730 std::string temp;
2731 const char* descriptor = array_class->GetDescriptor(&temp);
2732 size_t hash = ComputeModifiedUtf8Hash(descriptor);
2733 ObjPtr<mirror::Class> existing = InsertClass(descriptor, array_class, hash);
2734 CHECK(existing == nullptr);
Roland Levillain0e840272018-08-23 19:55:30 +01002735}
2736
Vladimir Markobcf17522018-06-01 13:14:32 +01002737ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> ClassLinker::AllocStackTraceElementArray(
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07002738 Thread* self,
2739 size_t length) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07002740 return mirror::ObjectArray<mirror::StackTraceElement>::Alloc(
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002741 self, GetClassRoot<mirror::ObjectArray<mirror::StackTraceElement>>(this), length);
Shih-wei Liao55df06b2011-08-26 14:39:27 -07002742}
2743
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002744ObjPtr<mirror::Class> ClassLinker::EnsureResolved(Thread* self,
2745 const char* descriptor,
2746 ObjPtr<mirror::Class> klass) {
Andreas Gampe2ed8def2014-08-28 14:41:02 -07002747 DCHECK(klass != nullptr);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002748 if (kIsDebugBuild) {
2749 StackHandleScope<1> hs(self);
2750 HandleWrapperObjPtr<mirror::Class> h = hs.NewHandleWrapper(&klass);
2751 Thread::PoisonObjectPointersIfDebug();
2752 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002753
2754 // For temporary classes we must wait for them to be retired.
2755 if (init_done_ && klass->IsTemp()) {
2756 CHECK(!klass->IsResolved());
Vladimir Marko72ab6842017-01-20 19:32:50 +00002757 if (klass->IsErroneousUnresolved()) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002758 ThrowEarlierClassFailure(klass);
2759 return nullptr;
2760 }
2761 StackHandleScope<1> hs(self);
2762 Handle<mirror::Class> h_class(hs.NewHandle(klass));
2763 ObjectLock<mirror::Class> lock(self, h_class);
2764 // Loop and wait for the resolving thread to retire this class.
Vladimir Marko72ab6842017-01-20 19:32:50 +00002765 while (!h_class->IsRetired() && !h_class->IsErroneousUnresolved()) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002766 lock.WaitIgnoringInterrupts();
2767 }
Vladimir Marko72ab6842017-01-20 19:32:50 +00002768 if (h_class->IsErroneousUnresolved()) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002769 ThrowEarlierClassFailure(h_class.Get());
2770 return nullptr;
2771 }
2772 CHECK(h_class->IsRetired());
2773 // Get the updated class from class table.
Andreas Gampe34ee6842014-12-02 15:43:52 -08002774 klass = LookupClass(self, descriptor, h_class.Get()->GetClassLoader());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002775 }
2776
Brian Carlstromaded5f72011-10-07 17:15:04 -07002777 // Wait for the class if it has not already been linked.
Mathieu Chartier4b0ef1c2016-07-29 16:26:01 -07002778 size_t index = 0;
2779 // Maximum number of yield iterations until we start sleeping.
2780 static const size_t kNumYieldIterations = 1000;
2781 // How long each sleep is in us.
2782 static const size_t kSleepDurationUS = 1000; // 1 ms.
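  // The loop below first spin-waits (sched_yield) for kNumYieldIterations iterations, then backs
  // off to sleeping kSleepDurationUS per iteration until the class is resolved or erroneous.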
Vladimir Marko72ab6842017-01-20 19:32:50 +00002783 while (!klass->IsResolved() && !klass->IsErroneousUnresolved()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002784 StackHandleScope<1> hs(self);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002785 HandleWrapperObjPtr<mirror::Class> h_class(hs.NewHandleWrapper(&klass));
Mathieu Chartier4b0ef1c2016-07-29 16:26:01 -07002786 {
2787 ObjectTryLock<mirror::Class> lock(self, h_class);
2788      // Cannot use a monitor wait here since it may block when returning and deadlock if another
2789 // thread has locked klass.
2790 if (lock.Acquired()) {
2791 // Check for circular dependencies between classes, the lock is required for SetStatus.
2792 if (!h_class->IsResolved() && h_class->GetClinitThreadId() == self->GetTid()) {
2793 ThrowClassCircularityError(h_class.Get());
Vladimir Marko2c64a832018-01-04 11:31:56 +00002794 mirror::Class::SetStatus(h_class, ClassStatus::kErrorUnresolved, self);
Mathieu Chartier4b0ef1c2016-07-29 16:26:01 -07002795 return nullptr;
2796 }
2797 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07002798 }
Mathieu Chartier4b0ef1c2016-07-29 16:26:01 -07002799 {
2800 // Handle wrapper deals with klass moving.
2801 ScopedThreadSuspension sts(self, kSuspended);
2802 if (index < kNumYieldIterations) {
2803 sched_yield();
2804 } else {
2805 usleep(kSleepDurationUS);
2806 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07002807 }
Mathieu Chartier4b0ef1c2016-07-29 16:26:01 -07002808 ++index;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07002809 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002810
Vladimir Marko72ab6842017-01-20 19:32:50 +00002811 if (klass->IsErroneousUnresolved()) {
Elliott Hughes4a2b4172011-09-20 17:08:25 -07002812 ThrowEarlierClassFailure(klass);
Mathieu Chartierc528dba2013-11-26 12:00:11 -08002813 return nullptr;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07002814 }
2815 // Return the loaded class. No exceptions should be pending.
David Sehr709b0702016-10-13 09:12:37 -07002816 CHECK(klass->IsResolved()) << klass->PrettyClass();
Ian Rogers62d6c772013-02-27 08:32:07 -08002817 self->AssertNoPendingException();
Vladimir Markobcf17522018-06-01 13:14:32 +01002818 return klass;
Brian Carlstromaded5f72011-10-07 17:15:04 -07002819}
2820
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08002821using ClassPathEntry = std::pair<const DexFile*, const dex::ClassDef*>;
Ian Rogers68b56852014-08-29 20:19:11 -07002822
2823// Search a collection of DexFiles for a descriptor.
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002824ClassPathEntry FindInClassPath(const char* descriptor,
Igor Murashkinb1d8c312015-08-04 11:18:43 -07002825 size_t hash, const std::vector<const DexFile*>& class_path) {
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002826 for (const DexFile* dex_file : class_path) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -08002827 DCHECK(dex_file != nullptr);
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08002828 const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*dex_file, descriptor, hash);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07002829 if (dex_class_def != nullptr) {
Ian Rogers68b56852014-08-29 20:19:11 -07002830 return ClassPathEntry(dex_file, dex_class_def);
2831 }
2832 }
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002833 return ClassPathEntry(nullptr, nullptr);
Ian Rogers68b56852014-08-29 20:19:11 -07002834}
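// FindInClassPath() returns ClassPathEntry(nullptr, nullptr) when no dex file in `class_path`
// defines the descriptor; callers test pair.second against nullptr.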
2835
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002836bool ClassLinker::FindClassInSharedLibraries(ScopedObjectAccessAlreadyRunnable& soa,
2837 Thread* self,
2838 const char* descriptor,
2839 size_t hash,
2840 Handle<mirror::ClassLoader> class_loader,
2841 /*out*/ ObjPtr<mirror::Class>* result) {
2842 ArtField* field =
2843 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
2844 ObjPtr<mirror::Object> raw_shared_libraries = field->GetObject(class_loader.Get());
2845 if (raw_shared_libraries == nullptr) {
2846 return true;
2847 }
2848
2849 StackHandleScope<2> hs(self);
2850 Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries(
2851 hs.NewHandle(raw_shared_libraries->AsObjectArray<mirror::ClassLoader>()));
2852 MutableHandle<mirror::ClassLoader> temp_loader = hs.NewHandle<mirror::ClassLoader>(nullptr);
Alex Lighta9bbc082019-11-14 14:51:41 -08002853 for (auto loader : shared_libraries.Iterate<mirror::ClassLoader>()) {
2854 temp_loader.Assign(loader);
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002855 if (!FindClassInBaseDexClassLoader(soa, self, descriptor, hash, temp_loader, result)) {
2856 return false; // One of the shared libraries is not supported.
2857 }
2858 if (*result != nullptr) {
2859 return true; // Found the class up the chain.
2860 }
2861 }
2862 return true;
2863}
2864
Nicolas Geoffray7d8d8ff2016-11-02 12:38:05 +00002865bool ClassLinker::FindClassInBaseDexClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
2866 Thread* self,
2867 const char* descriptor,
2868 size_t hash,
2869 Handle<mirror::ClassLoader> class_loader,
Vladimir Markobcf17522018-06-01 13:14:32 +01002870 /*out*/ ObjPtr<mirror::Class>* result) {
Calin Juravlecdd49122017-07-05 20:09:53 -07002871 // Termination case: boot class loader.
Andreas Gampef865ea92015-04-13 22:14:19 -07002872 if (IsBootClassLoader(soa, class_loader.Get())) {
Calin Juravle415dc3d2017-06-28 11:03:12 -07002873 *result = FindClassInBootClassLoaderClassPath(self, descriptor, hash);
Andreas Gampef865ea92015-04-13 22:14:19 -07002874 return true;
2875 }
2876
David Brazdil05909d82018-12-06 16:25:16 +00002877 if (IsPathOrDexClassLoader(soa, class_loader) || IsInMemoryDexClassLoader(soa, class_loader)) {
Calin Juravlecdd49122017-07-05 20:09:53 -07002878 // For regular path or dex class loader the search order is:
2879 // - parent
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002880 // - shared libraries
Calin Juravlecdd49122017-07-05 20:09:53 -07002881 // - class loader dex files
Andreas Gampef865ea92015-04-13 22:14:19 -07002882
Calin Juravlecdd49122017-07-05 20:09:53 -07002883 // Handles as RegisterDexFile may allocate dex caches (and cause thread suspension).
2884 StackHandleScope<1> hs(self);
2885 Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
2886 if (!FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result)) {
2887 return false; // One of the parents is not supported.
2888 }
2889 if (*result != nullptr) {
2890 return true; // Found the class up the chain.
2891 }
Andreas Gampef865ea92015-04-13 22:14:19 -07002892
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002893 if (!FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result)) {
2894 return false; // One of the shared library loader is not supported.
2895 }
2896 if (*result != nullptr) {
2897 return true; // Found the class in a shared library.
2898 }
2899
Calin Juravlecdd49122017-07-05 20:09:53 -07002900 // Search the current class loader classpath.
2901 *result = FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader);
Andreas Gampe501c3b02019-04-17 21:54:27 +00002902 return !soa.Self()->IsExceptionPending();
Andreas Gampef865ea92015-04-13 22:14:19 -07002903 }
2904
Calin Juravlecdd49122017-07-05 20:09:53 -07002905 if (IsDelegateLastClassLoader(soa, class_loader)) {
2906 // For delegate last, the search order is:
2907 // - boot class path
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002908 // - shared libraries
Calin Juravlecdd49122017-07-05 20:09:53 -07002909 // - class loader dex files
2910 // - parent
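    // That is, the opposite of the regular path/dex class loader above: the parent is consulted
    // last, only after the boot class path, shared libraries and this loader's own dex files.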
2911 *result = FindClassInBootClassLoaderClassPath(self, descriptor, hash);
2912 if (*result != nullptr) {
2913 return true; // The class is part of the boot class path.
2914 }
Andreas Gampe501c3b02019-04-17 21:54:27 +00002915 if (self->IsExceptionPending()) {
2916 // Pending exception means there was an error other than ClassNotFound that must be returned
2917 // to the caller.
2918 return false;
2919 }
Calin Juravlecdd49122017-07-05 20:09:53 -07002920
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002921 if (!FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result)) {
2922 return false; // One of the shared library loader is not supported.
2923 }
2924 if (*result != nullptr) {
2925 return true; // Found the class in a shared library.
2926 }
2927
Calin Juravlecdd49122017-07-05 20:09:53 -07002928 *result = FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader);
2929 if (*result != nullptr) {
2930      return true;  // Found the class in the current class loader.
2931 }
Andreas Gampe501c3b02019-04-17 21:54:27 +00002932 if (self->IsExceptionPending()) {
2933 // Pending exception means there was an error other than ClassNotFound that must be returned
2934 // to the caller.
2935 return false;
2936 }
Calin Juravlecdd49122017-07-05 20:09:53 -07002937
2938 // Handles as RegisterDexFile may allocate dex caches (and cause thread suspension).
2939 StackHandleScope<1> hs(self);
2940 Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
2941 return FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result);
2942 }
2943
2944 // Unsupported class loader.
2945 *result = nullptr;
2946 return false;
Calin Juravle415dc3d2017-06-28 11:03:12 -07002947}
2948
Andreas Gampe501c3b02019-04-17 21:54:27 +00002949namespace {
2950
2951// Matches exceptions caught in DexFile.defineClass.
2952ALWAYS_INLINE bool MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,
2953 ClassLinker* class_linker)
2954 REQUIRES_SHARED(Locks::mutator_lock_) {
2955 return
2956 // ClassNotFoundException.
2957 throwable->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
2958 class_linker))
2959 ||
2960 // NoClassDefFoundError. TODO: Reconsider this. b/130746382.
2961 throwable->InstanceOf(Runtime::Current()->GetPreAllocatedNoClassDefFoundError()->GetClass());
2962}
2963
2964// Clear exceptions caught in DexFile.defineClass.
2965ALWAYS_INLINE void FilterDexFileCaughtExceptions(Thread* self, ClassLinker* class_linker)
2966 REQUIRES_SHARED(Locks::mutator_lock_) {
2967 if (MatchesDexFileCaughtExceptions(self->GetException(), class_linker)) {
2968 self->ClearException();
2969 }
2970}
2971
2972} // namespace
2973
Calin Juravle415dc3d2017-06-28 11:03:12 -07002974// Finds the class in the boot class loader.
2975// If the class is found the method returns the resolved class. Otherwise it returns null.
2976ObjPtr<mirror::Class> ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
2977 const char* descriptor,
2978 size_t hash) {
2979 ObjPtr<mirror::Class> result = nullptr;
2980 ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
2981 if (pair.second != nullptr) {
2982 ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, nullptr);
2983 if (klass != nullptr) {
2984 result = EnsureResolved(self, descriptor, klass);
Mathieu Chartierab0ed822014-09-11 14:21:41 -07002985 } else {
Calin Juravle415dc3d2017-06-28 11:03:12 -07002986 result = DefineClass(self,
2987 descriptor,
2988 hash,
2989 ScopedNullHandle<mirror::ClassLoader>(),
2990 *pair.first,
2991 *pair.second);
Mathieu Chartierab0ed822014-09-11 14:21:41 -07002992 }
Calin Juravle415dc3d2017-06-28 11:03:12 -07002993 if (result == nullptr) {
2994 CHECK(self->IsExceptionPending()) << descriptor;
Andreas Gampe501c3b02019-04-17 21:54:27 +00002995 FilterDexFileCaughtExceptions(self, this);
Andreas Gampef865ea92015-04-13 22:14:19 -07002996 }
2997 }
Calin Juravle415dc3d2017-06-28 11:03:12 -07002998 return result;
2999}
Andreas Gampef865ea92015-04-13 22:14:19 -07003000
Calin Juravle415dc3d2017-06-28 11:03:12 -07003001ObjPtr<mirror::Class> ClassLinker::FindClassInBaseDexClassLoaderClassPath(
3002 ScopedObjectAccessAlreadyRunnable& soa,
3003 const char* descriptor,
3004 size_t hash,
3005 Handle<mirror::ClassLoader> class_loader) {
David Brazdil05909d82018-12-06 16:25:16 +00003006 DCHECK(IsPathOrDexClassLoader(soa, class_loader) ||
3007 IsInMemoryDexClassLoader(soa, class_loader) ||
3008 IsDelegateLastClassLoader(soa, class_loader))
Calin Juravle415dc3d2017-06-28 11:03:12 -07003009 << "Unexpected class loader for descriptor " << descriptor;
Andreas Gampef865ea92015-04-13 22:14:19 -07003010
Andreas Gampeb8e7c372018-02-20 18:24:55 -08003011 ObjPtr<mirror::Class> ret;
3012 auto define_class = [&](const DexFile* cp_dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003013 const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*cp_dex_file, descriptor, hash);
Andreas Gampeb8e7c372018-02-20 18:24:55 -08003014 if (dex_class_def != nullptr) {
3015 ObjPtr<mirror::Class> klass = DefineClass(soa.Self(),
3016 descriptor,
3017 hash,
3018 class_loader,
3019 *cp_dex_file,
3020 *dex_class_def);
3021 if (klass == nullptr) {
3022 CHECK(soa.Self()->IsExceptionPending()) << descriptor;
Andreas Gampe501c3b02019-04-17 21:54:27 +00003023 FilterDexFileCaughtExceptions(soa.Self(), this);
Andreas Gampeb8e7c372018-02-20 18:24:55 -08003024 // TODO: Is it really right to break here, and not check the other dex files?
Andreas Gampe501c3b02019-04-17 21:54:27 +00003025 } else {
3026 DCHECK(!soa.Self()->IsExceptionPending());
Mathieu Chartierab0ed822014-09-11 14:21:41 -07003027 }
Andreas Gampeb8e7c372018-02-20 18:24:55 -08003028 ret = klass;
3029      return false;  // Found a Class (or hit an error, leaving ret == nullptr); stop the visit.
Mathieu Chartierab0ed822014-09-11 14:21:41 -07003030 }
Andreas Gampeb8e7c372018-02-20 18:24:55 -08003031 return true; // Continue with the next DexFile.
3032 };
3033
3034 VisitClassLoaderDexFiles(soa, class_loader, define_class);
3035 return ret;
Mathieu Chartierab0ed822014-09-11 14:21:41 -07003036}
3037
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01003038ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
3039 const char* descriptor,
3040 Handle<mirror::ClassLoader> class_loader) {
Elliott Hughesba8eee12012-01-24 20:25:24 -08003041 DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
Ian Rogers98379392014-02-24 16:53:16 -08003042 DCHECK(self != nullptr);
Ian Rogers00f7d0e2012-07-19 15:28:27 -07003043 self->AssertNoPendingException();
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07003044 self->PoisonObjectPointers(); // For DefineClass, CreateArrayClass, etc...
Elliott Hughesc3b77c72011-12-15 20:56:48 -08003045 if (descriptor[1] == '\0') {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08003046    // Only the descriptors of primitive types should be 1 character long; also avoid class lookup
3047 // for primitive classes that aren't backed by dex files.
3048 return FindPrimitiveClass(descriptor[0]);
3049 }
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08003050 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
Brian Carlstromaded5f72011-10-07 17:15:04 -07003051 // Find the class in the loaded classes table.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003052 ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, class_loader.Get());
Ian Rogers68b56852014-08-29 20:19:11 -07003053 if (klass != nullptr) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003054 return EnsureResolved(self, descriptor, klass);
Brian Carlstromaded5f72011-10-07 17:15:04 -07003055 }
Brian Carlstromaded5f72011-10-07 17:15:04 -07003056 // Class is not yet loaded.
Andreas Gampefa4333d2017-02-14 11:10:34 -08003057 if (descriptor[0] != '[' && class_loader == nullptr) {
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00003058 // Non-array class and the boot class loader, search the boot class path.
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08003059 ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
Ian Rogers68b56852014-08-29 20:19:11 -07003060 if (pair.second != nullptr) {
Mathieu Chartier9865bde2015-12-21 09:58:16 -08003061 return DefineClass(self,
3062 descriptor,
3063 hash,
3064 ScopedNullHandle<mirror::ClassLoader>(),
3065 *pair.first,
Ian Rogers7b078e82014-09-10 14:44:24 -07003066 *pair.second);
Ian Rogers63557452014-06-04 16:57:15 -07003067 } else {
3068      // The boot class loader is searched ahead of the application class loader; failures are
3069 // expected and will be wrapped in a ClassNotFoundException. Use the pre-allocated error to
3070 // trigger the chaining with a proper stack trace.
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00003071 ObjPtr<mirror::Throwable> pre_allocated =
3072 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
Nicolas Geoffray14691c52015-03-05 10:40:17 +00003073 self->SetException(pre_allocated);
Ian Rogers63557452014-06-04 16:57:15 -07003074 return nullptr;
Jesse Wilson47daf872011-11-23 11:42:45 -05003075 }
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00003076 }
3077 ObjPtr<mirror::Class> result_ptr;
3078 bool descriptor_equals;
3079 if (descriptor[0] == '[') {
3080 result_ptr = CreateArrayClass(self, descriptor, hash, class_loader);
3081 DCHECK_EQ(result_ptr == nullptr, self->IsExceptionPending());
3082 DCHECK(result_ptr == nullptr || result_ptr->DescriptorEquals(descriptor));
3083 descriptor_equals = true;
Jesse Wilson47daf872011-11-23 11:42:45 -05003084 } else {
Ian Rogers98379392014-02-24 16:53:16 -08003085 ScopedObjectAccessUnchecked soa(self);
Vladimir Markoc5798bf2016-12-09 10:20:54 +00003086 bool known_hierarchy =
3087 FindClassInBaseDexClassLoader(soa, self, descriptor, hash, class_loader, &result_ptr);
3088 if (result_ptr != nullptr) {
3089 // The chain was understood and we found the class. We still need to add the class to
3090      // the class table to protect against racy programs that can try to redefine the path list
3091 // which would change the Class<?> returned for subsequent evaluation of const-class.
3092 DCHECK(known_hierarchy);
3093 DCHECK(result_ptr->DescriptorEquals(descriptor));
3094 descriptor_equals = true;
Andreas Gampe501c3b02019-04-17 21:54:27 +00003095 } else if (!self->IsExceptionPending()) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00003096 // Either the chain wasn't understood or the class wasn't found.
Andreas Gampe501c3b02019-04-17 21:54:27 +00003097      // If there is a pending exception we didn't clear, it is not a ClassNotFoundException and
3098 // we should return it instead of silently clearing and retrying.
Vladimir Markoc5798bf2016-12-09 10:20:54 +00003099 //
3100 // If the chain was understood but we did not find the class, let the Java-side
3101 // rediscover all this and throw the exception with the right stack trace. Note that
3102 // the Java-side could still succeed for racy programs if another thread is actively
3103 // modifying the class loader's path list.
Andreas Gampef865ea92015-04-13 22:14:19 -07003104
Alex Light185a4612018-10-04 15:54:25 -07003105 // The runtime is not allowed to call into java from a runtime-thread so just abort.
Alex Lighte9f61032018-09-24 16:04:51 -07003106 if (self->IsRuntimeThread()) {
Calin Juravleccd56952016-12-15 17:57:38 +00003107 // Oops, we can't call into java so we can't run actual class-loader code.
3108 // This is true for e.g. for the compiler (jit or aot).
Vladimir Markoc5798bf2016-12-09 10:20:54 +00003109 ObjPtr<mirror::Throwable> pre_allocated =
3110 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3111 self->SetException(pre_allocated);
Vladimir Marko2c8c6b62016-12-01 17:42:00 +00003112 return nullptr;
3113 }
Vladimir Markoc5798bf2016-12-09 10:20:54 +00003114
Vladimir Marko5fdd7782017-04-20 11:26:03 +01003115 // Inlined DescriptorToDot(descriptor) with extra validation.
3116 //
3117 // Throw NoClassDefFoundError early rather than potentially load a class only to fail
3118 // the DescriptorEquals() check below and give a confusing error message. For example,
3119 // when native code erroneously calls JNI GetFieldId() with signature "java/lang/String"
3120 // instead of "Ljava/lang/String;", the message below using the "dot" names would be
3121 // "class loader [...] returned class java.lang.String instead of java.lang.String".
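      // For example, "Ljava/lang/String;" passes the checks below, while "java/lang/String" or
      // "Ljava.lang.String;" fails them and triggers the early NoClassDefFoundError.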
3122 size_t descriptor_length = strlen(descriptor);
3123 if (UNLIKELY(descriptor[0] != 'L') ||
3124 UNLIKELY(descriptor[descriptor_length - 1] != ';') ||
3125 UNLIKELY(memchr(descriptor + 1, '.', descriptor_length - 2) != nullptr)) {
3126 ThrowNoClassDefFoundError("Invalid descriptor: %s.", descriptor);
3127 return nullptr;
3128 }
Andreas Gampe501c3b02019-04-17 21:54:27 +00003129
Vladimir Marko5fdd7782017-04-20 11:26:03 +01003130 std::string class_name_string(descriptor + 1, descriptor_length - 2);
3131 std::replace(class_name_string.begin(), class_name_string.end(), '/', '.');
Andreas Gampe87658f32019-04-18 18:39:02 +00003132 if (known_hierarchy &&
3133 fast_class_not_found_exceptions_ &&
3134 !Runtime::Current()->IsJavaDebuggable()) {
3135 // For known hierarchy, we know that the class is going to throw an exception. If we aren't
3136 // debuggable, optimize this path by throwing directly here without going back to Java
3137 // language. This reduces how many ClassNotFoundExceptions happen.
3138 self->ThrowNewExceptionF("Ljava/lang/ClassNotFoundException;",
3139 "%s",
3140 class_name_string.c_str());
3141 } else {
3142 ScopedLocalRef<jobject> class_loader_object(
3143 soa.Env(), soa.AddLocalReference<jobject>(class_loader.Get()));
3144 ScopedLocalRef<jobject> result(soa.Env(), nullptr);
3145 {
3146 ScopedThreadStateChange tsc(self, kNative);
3147 ScopedLocalRef<jobject> class_name_object(
3148 soa.Env(), soa.Env()->NewStringUTF(class_name_string.c_str()));
3149 if (class_name_object.get() == nullptr) {
3150 DCHECK(self->IsExceptionPending()); // OOME.
3151 return nullptr;
3152 }
3153 CHECK(class_loader_object.get() != nullptr);
3154 result.reset(soa.Env()->CallObjectMethod(class_loader_object.get(),
3155 WellKnownClasses::java_lang_ClassLoader_loadClass,
3156 class_name_object.get()));
3157 }
3158 if (result.get() == nullptr && !self->IsExceptionPending()) {
3159 // broken loader - throw NPE to be compatible with Dalvik
3160 ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
3161 class_name_string.c_str()).c_str());
Vladimir Markoc5798bf2016-12-09 10:20:54 +00003162 return nullptr;
3163 }
Andreas Gampe87658f32019-04-18 18:39:02 +00003164 result_ptr = soa.Decode<mirror::Class>(result.get());
3165 // Check the name of the returned class.
3166 descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(descriptor);
Vladimir Markoc5798bf2016-12-09 10:20:54 +00003167 }
Andreas Gampe501c3b02019-04-17 21:54:27 +00003168 } else {
3169 DCHECK(!MatchesDexFileCaughtExceptions(self->GetException(), this));
Vladimir Marko2c8c6b62016-12-01 17:42:00 +00003170 }
Brian Carlstromaded5f72011-10-07 17:15:04 -07003171 }
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00003172
3173 if (self->IsExceptionPending()) {
3174 // If the ClassLoader threw or array class allocation failed, pass that exception up.
3175 // However, to comply with the RI behavior, first check if another thread succeeded.
3176 result_ptr = LookupClass(self, descriptor, hash, class_loader.Get());
3177 if (result_ptr != nullptr && !result_ptr->IsErroneous()) {
3178 self->ClearException();
3179 return EnsureResolved(self, descriptor, result_ptr);
3180 }
3181 return nullptr;
3182 }
3183
3184 // Try to insert the class to the class table, checking for mismatch.
3185 ObjPtr<mirror::Class> old;
3186 {
3187 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
3188 ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
3189 old = class_table->Lookup(descriptor, hash);
3190 if (old == nullptr) {
3191 old = result_ptr; // For the comparison below, after releasing the lock.
3192 if (descriptor_equals) {
Vladimir Markobcf17522018-06-01 13:14:32 +01003193 class_table->InsertWithHash(result_ptr, hash);
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07003194 WriteBarrier::ForEveryFieldWrite(class_loader.Get());
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00003195 } // else throw below, after releasing the lock.
3196 }
3197 }
3198 if (UNLIKELY(old != result_ptr)) {
3199 // Return `old` (even if `!descriptor_equals`) to mimic the RI behavior for parallel
3200 // capable class loaders. (All class loaders are considered parallel capable on Android.)
Vladimir Markodfc0de72019-04-01 10:57:55 +01003201 ObjPtr<mirror::Class> loader_class = class_loader->GetClass();
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00003202 const char* loader_class_name =
3203 loader_class->GetDexFile().StringByTypeIdx(loader_class->GetDexTypeIndex());
3204 LOG(WARNING) << "Initiating class loader of type " << DescriptorToDot(loader_class_name)
3205 << " is not well-behaved; it returned a different Class for racing loadClass(\""
3206 << DescriptorToDot(descriptor) << "\").";
3207 return EnsureResolved(self, descriptor, old);
3208 }
3209 if (UNLIKELY(!descriptor_equals)) {
3210 std::string result_storage;
3211 const char* result_name = result_ptr->GetDescriptor(&result_storage);
3212 std::string loader_storage;
3213 const char* loader_class_name = class_loader->GetClass()->GetDescriptor(&loader_storage);
3214 ThrowNoClassDefFoundError(
3215 "Initiating class loader of type %s returned class %s instead of %s.",
3216 DescriptorToDot(loader_class_name).c_str(),
3217 DescriptorToDot(result_name).c_str(),
3218 DescriptorToDot(descriptor).c_str());
3219 return nullptr;
3220 }
Vladimir Markobcf17522018-06-01 13:14:32 +01003221 // Success.
3222 return result_ptr;
Brian Carlstromaded5f72011-10-07 17:15:04 -07003223}
3224
Alex Light270db1c2019-12-03 12:20:01 +00003225// Helper for maintaining DefineClass counting. We need to notify callbacks when we start/end a
3226// define-class and track how many recursive DefineClass calls we are nested in, in order to
3227// allow for things like pausing class definition.
3228struct ScopedDefiningClass {
3229 public:
3230 explicit ScopedDefiningClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
3231 : self_(self), returned_(false) {
3232 Locks::mutator_lock_->AssertSharedHeld(self_);
3233 Runtime::Current()->GetRuntimeCallbacks()->BeginDefineClass();
3234 self_->IncrDefineClassCount();
3235 }
3236 ~ScopedDefiningClass() REQUIRES_SHARED(Locks::mutator_lock_) {
3237 Locks::mutator_lock_->AssertSharedHeld(self_);
3238 CHECK(returned_);
3239 }
3240
3241 ObjPtr<mirror::Class> Finish(Handle<mirror::Class> h_klass)
3242 REQUIRES_SHARED(Locks::mutator_lock_) {
3243 CHECK(!returned_);
3244 self_->DecrDefineClassCount();
3245 Runtime::Current()->GetRuntimeCallbacks()->EndDefineClass();
3246 Thread::PoisonObjectPointersIfDebug();
3247 returned_ = true;
3248 return h_klass.Get();
3249 }
3250
3251 ObjPtr<mirror::Class> Finish(ObjPtr<mirror::Class> klass)
3252 REQUIRES_SHARED(Locks::mutator_lock_) {
3253 StackHandleScope<1> hs(self_);
3254 Handle<mirror::Class> h_klass(hs.NewHandle(klass));
3255 return Finish(h_klass);
3256 }
3257
3258 ObjPtr<mirror::Class> Finish(nullptr_t np ATTRIBUTE_UNUSED)
3259 REQUIRES_SHARED(Locks::mutator_lock_) {
3260 ScopedNullHandle<mirror::Class> snh;
3261 return Finish(snh);
3262 }
3263
3264 private:
3265 Thread* self_;
3266 bool returned_;
3267};
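// Usage sketch (mirrors DefineClass() below): construct a ScopedDefiningClass at the top of the
// definition and route every return through Finish(), e.g.
//   ScopedDefiningClass sdc(self);
//   ...
//   return sdc.Finish(h_new_class);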
3268
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01003269ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
3270 const char* descriptor,
3271 size_t hash,
3272 Handle<mirror::ClassLoader> class_loader,
3273 const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003274 const dex::ClassDef& dex_class_def) {
Alex Light270db1c2019-12-03 12:20:01 +00003275 ScopedDefiningClass sdc(self);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003276 StackHandleScope<3> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07003277 auto klass = hs.NewHandle<mirror::Class>(nullptr);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003278
Brian Carlstromaded5f72011-10-07 17:15:04 -07003279 // Load the class from the dex file.
Ian Rogers7dfb28c2013-08-22 08:18:36 -07003280 if (UNLIKELY(!init_done_)) {
Brian Carlstromaded5f72011-10-07 17:15:04 -07003281    // Finish up the init of the hand-crafted class_roots_.
Ian Rogers7dfb28c2013-08-22 08:18:36 -07003282 if (strcmp(descriptor, "Ljava/lang/Object;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003283 klass.Assign(GetClassRoot<mirror::Object>(this));
Ian Rogers7dfb28c2013-08-22 08:18:36 -07003284 } else if (strcmp(descriptor, "Ljava/lang/Class;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003285 klass.Assign(GetClassRoot<mirror::Class>(this));
Ian Rogers7dfb28c2013-08-22 08:18:36 -07003286 } else if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003287 klass.Assign(GetClassRoot<mirror::String>(this));
Fred Shih4ee7a662014-07-11 09:59:27 -07003288 } else if (strcmp(descriptor, "Ljava/lang/ref/Reference;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003289 klass.Assign(GetClassRoot<mirror::Reference>(this));
Ian Rogers7dfb28c2013-08-22 08:18:36 -07003290 } else if (strcmp(descriptor, "Ljava/lang/DexCache;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003291 klass.Assign(GetClassRoot<mirror::DexCache>(this));
Alex Lightd6251582016-10-31 11:12:30 -07003292 } else if (strcmp(descriptor, "Ldalvik/system/ClassExt;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003293 klass.Assign(GetClassRoot<mirror::ClassExt>(this));
Brian Carlstromaded5f72011-10-07 17:15:04 -07003294 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003295 }
3296
Vladimir Markob9c29f62019-03-20 14:22:51 +00003297 // For AOT-compilation of an app, we may use a shortened boot class path that excludes
3298 // some runtime modules. Prevent definition of classes in app class loader that could clash
3299 // with these modules as these classes could be resolved differently during execution.
3300 if (class_loader != nullptr &&
3301 Runtime::Current()->IsAotCompiler() &&
Vladimir Markod1f73512020-04-02 10:50:35 +01003302 IsUpdatableBootClassPathDescriptor(descriptor)) {
Vladimir Markob9c29f62019-03-20 14:22:51 +00003303 ObjPtr<mirror::Throwable> pre_allocated =
3304 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3305 self->SetException(pre_allocated);
Alex Light270db1c2019-12-03 12:20:01 +00003306 return sdc.Finish(nullptr);
Vladimir Markob9c29f62019-03-20 14:22:51 +00003307 }
3308
Alex Lighte9f61032018-09-24 16:04:51 -07003309 // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
3310 // code to be executed. We put it up here so we can avoid all the allocations associated with
3311 // creating the class. This can happen with (eg) jit threads.
3312 if (!self->CanLoadClasses()) {
3313 // Make sure we don't try to load anything, potentially causing an infinite loop.
3314 ObjPtr<mirror::Throwable> pre_allocated =
3315 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3316 self->SetException(pre_allocated);
Alex Light270db1c2019-12-03 12:20:01 +00003317 return sdc.Finish(nullptr);
Alex Lighte9f61032018-09-24 16:04:51 -07003318 }
3319
Andreas Gampefa4333d2017-02-14 11:10:34 -08003320 if (klass == nullptr) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003321 // Allocate a class with the status of not ready.
3322 // Interface object should get the right size here. Regular class will
3323 // figure out the right size later and be replaced with one of the right
3324 // size when the class becomes resolved.
Chang Xing0c2c2222017-08-04 14:36:17 -07003325 if (CanAllocClass()) {
3326 klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
3327 } else {
Alex Light270db1c2019-12-03 12:20:01 +00003328 return sdc.Finish(nullptr);
Chang Xing0c2c2222017-08-04 14:36:17 -07003329 }
Brian Carlstromaded5f72011-10-07 17:15:04 -07003330 }
Andreas Gampefa4333d2017-02-14 11:10:34 -08003331 if (UNLIKELY(klass == nullptr)) {
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07003332 self->AssertPendingOOMException();
Alex Light270db1c2019-12-03 12:20:01 +00003333 return sdc.Finish(nullptr);
Ian Rogersa436fde2013-08-27 23:34:06 -07003334 }
Alex Lightb0f11922017-01-23 14:25:17 -08003335 // Get the real dex file. This will return the input if there aren't any callbacks or they do
3336 // nothing.
3337 DexFile const* new_dex_file = nullptr;
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003338 dex::ClassDef const* new_class_def = nullptr;
Alex Lightb0f11922017-01-23 14:25:17 -08003339 // TODO We should ideally figure out some way to move this after we get a lock on the klass so it
3340 // will only be called once.
3341 Runtime::Current()->GetRuntimeCallbacks()->ClassPreDefine(descriptor,
3342 klass,
3343 class_loader,
3344 dex_file,
3345 dex_class_def,
3346 &new_dex_file,
3347 &new_class_def);
Alex Light440b5d92017-01-24 15:32:25 -08003348 // Check to see if an exception happened during runtime callbacks. Return if so.
3349 if (self->IsExceptionPending()) {
Alex Light270db1c2019-12-03 12:20:01 +00003350 return sdc.Finish(nullptr);
Alex Light440b5d92017-01-24 15:32:25 -08003351 }
Alex Lightb0f11922017-01-23 14:25:17 -08003352 ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07003353 if (dex_cache == nullptr) {
Vladimir Markocd556b02017-02-03 11:47:34 +00003354 self->AssertPendingException();
Alex Light270db1c2019-12-03 12:20:01 +00003355 return sdc.Finish(nullptr);
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07003356 }
3357 klass->SetDexCache(dex_cache);
Alex Lightb0f11922017-01-23 14:25:17 -08003358 SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());
Mathieu Chartierc7853442015-03-27 14:35:38 -07003359
Jeff Hao848f70a2014-01-15 13:49:50 -08003360 // Mark the string class by setting its access flag.
3361 if (UNLIKELY(!init_done_)) {
3362 if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
3363 klass->SetStringClass();
3364 }
3365 }
3366
Mathieu Chartierdb2633c2014-05-16 09:59:29 -07003367 ObjectLock<mirror::Class> lock(self, klass);
Brian Carlstromaded5f72011-10-07 17:15:04 -07003368 klass->SetClinitThreadId(self->GetTid());
Mathieu Chartier1e4841e2016-12-15 14:21:04 -08003369 // Make sure we have a valid empty iftable even if there are errors.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003370 klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003371
Mathieu Chartier590fee92013-09-13 13:46:47 -07003372 // Add the newly loaded class to the loaded classes table.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003373 ObjPtr<mirror::Class> existing = InsertClass(descriptor, klass.Get(), hash);
Ian Rogersc114b5f2014-07-21 08:55:01 -07003374 if (existing != nullptr) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07003375 // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
3376 // this thread to block.
Alex Light270db1c2019-12-03 12:20:01 +00003377 return sdc.Finish(EnsureResolved(self, descriptor, existing));
Brian Carlstromaded5f72011-10-07 17:15:04 -07003378 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003379
Mathieu Chartierc7853442015-03-27 14:35:38 -07003380 // Load the fields and other things after we are inserted in the table. This is so that we don't
3381  // end up allocating unfree-able linear alloc resources and then lose the race. The
3382 // other reason is that the field roots are only visited from the class table. So we need to be
3383 // inserted before we allocate / fill in these fields.
Alex Lightb0f11922017-01-23 14:25:17 -08003384 LoadClass(self, *new_dex_file, *new_class_def, klass);
Mathieu Chartierc7853442015-03-27 14:35:38 -07003385 if (self->IsExceptionPending()) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08003386 VLOG(class_linker) << self->GetException()->Dump();
Mathieu Chartierc7853442015-03-27 14:35:38 -07003387    // An exception occurred during load; set status to erroneous while holding klass' lock in case
3388 // notification is necessary.
3389 if (!klass->IsErroneous()) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00003390 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
Mathieu Chartierc7853442015-03-27 14:35:38 -07003391 }
Alex Light270db1c2019-12-03 12:20:01 +00003392 return sdc.Finish(nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07003393 }
3394
Brian Carlstromaded5f72011-10-07 17:15:04 -07003395 // Finish loading (if necessary) by finding parents
3396 CHECK(!klass->IsLoaded());
Alex Lightb0f11922017-01-23 14:25:17 -08003397 if (!LoadSuperAndInterfaces(klass, *new_dex_file)) {
Brian Carlstromaded5f72011-10-07 17:15:04 -07003398 // Loading failed.
Ian Rogersecd4d9a2014-07-22 00:59:52 -07003399 if (!klass->IsErroneous()) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00003400 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
Ian Rogersecd4d9a2014-07-22 00:59:52 -07003401 }
Alex Light270db1c2019-12-03 12:20:01 +00003402 return sdc.Finish(nullptr);
Brian Carlstromaded5f72011-10-07 17:15:04 -07003403 }
3404 CHECK(klass->IsLoaded());
Andreas Gampe0f01b582017-01-18 15:22:37 -08003405
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07003406 // At this point the class is loaded. Publish a ClassLoad event.
Andreas Gampe0f01b582017-01-18 15:22:37 -08003407 // Note: this may be a temporary class. It is a listener's responsibility to handle this.
Andreas Gampeac30fa22017-01-18 21:02:36 -08003408 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(klass);
Andreas Gampe0f01b582017-01-18 15:22:37 -08003409
Brian Carlstromaded5f72011-10-07 17:15:04 -07003410 // Link the class (if necessary)
3411 CHECK(!klass->IsResolved());
Mathieu Chartier590fee92013-09-13 13:46:47 -07003412 // TODO: Use fast jobjects?
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07003413 auto interfaces = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003414
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07003415 MutableHandle<mirror::Class> h_new_class = hs.NewHandle<mirror::Class>(nullptr);
Igor Murashkinb1d8c312015-08-04 11:18:43 -07003416 if (!LinkClass(self, descriptor, klass, interfaces, &h_new_class)) {
Brian Carlstromaded5f72011-10-07 17:15:04 -07003417 // Linking failed.
Ian Rogersecd4d9a2014-07-22 00:59:52 -07003418 if (!klass->IsErroneous()) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00003419 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
Ian Rogersecd4d9a2014-07-22 00:59:52 -07003420 }
Alex Light270db1c2019-12-03 12:20:01 +00003421 return sdc.Finish(nullptr);
Brian Carlstromaded5f72011-10-07 17:15:04 -07003422 }
Mathieu Chartier524507a2014-08-27 15:28:28 -07003423 self->AssertNoPendingException();
Andreas Gampefa4333d2017-02-14 11:10:34 -08003424 CHECK(h_new_class != nullptr) << descriptor;
Vladimir Marko72ab6842017-01-20 19:32:50 +00003425 CHECK(h_new_class->IsResolved() && !h_new_class->IsErroneousResolved()) << descriptor;
Elliott Hughes4740cdf2011-12-07 14:07:12 -08003426
Sebastien Hertza8a697f2015-01-15 12:28:47 +01003427 // Instrumentation may have updated entrypoints for all methods of all
3428 // classes. However it could not update methods of this class while we
3429 // were loading it. Now the class is resolved, we can update entrypoints
3430 // as required by instrumentation.
3431 if (Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()) {
3432 // We must be in the kRunnable state to prevent instrumentation from
3433 // suspending all threads to update entrypoints while we are doing it
3434 // for this class.
3435 DCHECK_EQ(self->GetState(), kRunnable);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07003436 Runtime::Current()->GetInstrumentation()->InstallStubsForClass(h_new_class.Get());
Sebastien Hertza8a697f2015-01-15 12:28:47 +01003437 }
3438
Elliott Hughes4740cdf2011-12-07 14:07:12 -08003439 /*
3440 * We send CLASS_PREPARE events to the debugger from here. The
3441 * definition of "preparation" is creating the static fields for a
3442 * class and initializing them to the standard default values, but not
3443 * executing any code (that comes later, during "initialization").
3444 *
3445 * We did the static preparation in LinkClass.
3446 *
3447 * The class has been prepared and resolved but possibly not yet verified
3448 * at this point.
3449 */
Andreas Gampeac30fa22017-01-18 21:02:36 -08003450 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(klass, h_new_class);
Elliott Hughes4740cdf2011-12-07 14:07:12 -08003451
Tamas Berghammer160e6df2016-01-05 14:29:02 +00003452 // Notify native debugger of the new class and its layout.
3453 jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());
3454
Alex Light270db1c2019-12-03 12:20:01 +00003455 return sdc.Finish(h_new_class);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07003456}
3457
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003458uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003459 const dex::ClassDef& dex_class_def) {
Brian Carlstrom4873d462011-08-21 15:23:39 -07003460 size_t num_ref = 0;
Fred Shih37f05ef2014-07-16 18:38:08 -07003461 size_t num_8 = 0;
3462 size_t num_16 = 0;
Brian Carlstrom4873d462011-08-21 15:23:39 -07003463 size_t num_32 = 0;
3464 size_t num_64 = 0;
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003465 ClassAccessor accessor(dex_file, dex_class_def);
3466 // We allow duplicate definitions of the same field in a class_data_item
3467 // but ignore the repeated indexes here, b/21868015.
3468 uint32_t last_field_idx = dex::kDexNoIndex;
3469 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
3470 uint32_t field_idx = field.GetIndex();
3471 // Ordering enforced by DexFileVerifier.
3472 DCHECK(last_field_idx == dex::kDexNoIndex || last_field_idx <= field_idx);
3473 if (UNLIKELY(field_idx == last_field_idx)) {
3474 continue;
3475 }
3476 last_field_idx = field_idx;
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003477 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003478 const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
3479 char c = descriptor[0];
3480 switch (c) {
3481 case 'L':
3482 case '[':
3483 num_ref++;
3484 break;
3485 case 'J':
3486 case 'D':
3487 num_64++;
3488 break;
3489 case 'I':
3490 case 'F':
3491 num_32++;
3492 break;
3493 case 'S':
3494 case 'C':
3495 num_16++;
3496 break;
3497 case 'B':
3498 case 'Z':
3499 num_8++;
3500 break;
3501 default:
3502 LOG(FATAL) << "Unknown descriptor: " << c;
3503 UNREACHABLE();
Brian Carlstrom4873d462011-08-21 15:23:39 -07003504 }
3505 }
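  // Only static field storage is tallied here; per the method name, embedded tables are excluded
  // (the proper size is figured out later, once the class becomes resolved - see DefineClass).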
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003506 return mirror::Class::ComputeClassSize(false,
3507 0,
3508 num_8,
3509 num_16,
3510 num_32,
3511 num_64,
3512 num_ref,
Mathieu Chartiere401d142015-04-22 13:56:20 -07003513 image_pointer_size_);
Brian Carlstrom4873d462011-08-21 15:23:39 -07003514}
3515
Alex Lightfc49fec2018-01-16 22:28:36 +00003516// Special case to get oat code without overwriting a trampoline.
3517const void* ClassLinker::GetQuickOatCodeFor(ArtMethod* method) {
David Sehr709b0702016-10-13 09:12:37 -07003518 CHECK(method->IsInvokable()) << method->PrettyMethod();
Nicolas Geoffraya7a47592015-11-24 09:17:30 +00003519 if (method->IsProxyMethod()) {
Ian Rogersef7d42f2014-01-06 12:55:46 -08003520 return GetQuickProxyInvokeHandler();
Jeff Hao8df6cea2013-07-29 13:54:48 -07003521 }
Nicolas Geoffray32384402019-07-17 20:06:44 +01003522 const void* code = method->GetOatMethodQuickCode(GetImagePointerSize());
Alex Lightfc49fec2018-01-16 22:28:36 +00003523 if (code != nullptr) {
3524 return code;
Mathieu Chartier2535abe2015-02-17 10:38:49 -08003525 }
Nicolas Geoffray32384402019-07-17 20:06:44 +01003526
3527 jit::Jit* jit = Runtime::Current()->GetJit();
3528 if (jit != nullptr) {
3529 code = jit->GetCodeCache()->GetSavedEntryPointOfPreCompiledMethod(method);
3530 if (code != nullptr) {
3531 return code;
3532 }
3533 }
3534
Alex Lightfc49fec2018-01-16 22:28:36 +00003535 if (method->IsNative()) {
3536 // No code and native? Use generic trampoline.
3537 return GetQuickGenericJniStub();
3538 }
Nicolas Geoffray00391822019-12-10 10:17:23 +00003539
3540 if (interpreter::CanRuntimeUseNterp() && interpreter::CanMethodUseNterp(method)) {
3541 return interpreter::GetNterpEntryPoint();
3542 }
3543
Alex Lightfc49fec2018-01-16 22:28:36 +00003544 return GetQuickToInterpreterBridge();
TDYa12785321912012-04-01 15:24:56 -07003545}
3546
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003547bool ClassLinker::ShouldUseInterpreterEntrypoint(ArtMethod* method, const void* quick_code) {
Alex Light2d441b12018-06-08 15:33:21 -07003548 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003549 if (UNLIKELY(method->IsNative() || method->IsProxyMethod())) {
3550 return false;
3551 }
3552
Elliott Hughes956af0f2014-12-11 14:34:28 -08003553 if (quick_code == nullptr) {
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003554 return true;
3555 }
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003556
3557 Runtime* runtime = Runtime::Current();
3558 instrumentation::Instrumentation* instr = runtime->GetInstrumentation();
3559 if (instr->InterpretOnly()) {
3560 return true;
3561 }
3562
3563 if (runtime->GetClassLinker()->IsQuickToInterpreterBridge(quick_code)) {
3564 // Doing this check avoids doing compiled/interpreter transitions.
3565 return true;
3566 }
3567
Alex Lightfc588092020-01-23 15:39:08 -08003568 if (Thread::Current()->IsForceInterpreter()) {
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003569 // Force the use of interpreter when it is required by the debugger.
3570 return true;
3571 }
3572
Alex Light8f34aba2017-10-09 13:46:32 -07003573 if (Thread::Current()->IsAsyncExceptionPending()) {
3574 // Force use of interpreter to handle async-exceptions
3575 return true;
3576 }
3577
Alex Light2d441b12018-06-08 15:33:21 -07003578 if (quick_code == GetQuickInstrumentationEntryPoint()) {
3579 const void* instr_target = instr->GetCodeForInvoke(method);
3580 DCHECK_NE(instr_target, GetQuickInstrumentationEntryPoint()) << method->PrettyMethod();
3581 return ShouldUseInterpreterEntrypoint(method, instr_target);
3582 }
3583
Nicolas Geoffray433b79a2017-01-30 20:54:45 +00003584 if (runtime->IsJavaDebuggable()) {
3585 // For simplicity, we ignore precompiled code and go to the interpreter
3586 // assuming we don't already have jitted code.
3587 // We could look at the oat file where `quick_code` is being defined,
3588 // and check whether it's been compiled debuggable, but we decided to
3589 // only rely on the JIT for debuggable apps.
Alex Light6b16d892016-11-11 11:21:04 -08003590 jit::Jit* jit = Runtime::Current()->GetJit();
3591 return (jit == nullptr) || !jit->GetCodeCache()->ContainsPc(quick_code);
3592 }
3593
Nicolas Geoffrayc9de61c2018-11-27 17:34:31 +00003594 if (runtime->IsNativeDebuggable()) {
Calin Juravlee5de54c2016-04-20 14:22:09 +01003595 DCHECK(runtime->UseJitCompilation() && runtime->GetJit()->JitAtFirstUse());
David Srbeckyf4480162016-03-16 00:06:24 +00003596 // If we are doing native debugging, ignore application's AOT code,
Nicolas Geoffray433b79a2017-01-30 20:54:45 +00003597 // since we want to JIT it (at first use) with extra stackmaps for native
3598    // debugging. However, we keep all AOT code from the boot image,
3599 // since the JIT-at-first-use is blocking and would result in non-negligible
3600 // startup performance impact.
David Srbeckyf4480162016-03-16 00:06:24 +00003601 return !runtime->GetHeap()->IsInBootImageOatFile(quick_code);
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003602 }
3603
3604 return false;
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003605}
3606
Vladimir Marko86c87522020-05-11 16:55:55 +01003607void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> klass) {
Alex Light2d441b12018-06-08 15:33:21 -07003608 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
Vladimir Markocce414f2019-10-07 08:51:33 +01003609 DCHECK(klass->IsVisiblyInitialized()) << klass->PrettyDescriptor();
Vladimir Marko86c87522020-05-11 16:55:55 +01003610 size_t num_direct_methods = klass->NumDirectMethods();
3611 if (num_direct_methods == 0) {
Ian Rogers1c829822013-09-30 18:18:50 -07003612 return; // No direct methods => no static methods.
Ian Rogers19846512012-02-24 11:42:47 -08003613 }
Vladimir Markocce414f2019-10-07 08:51:33 +01003614 if (UNLIKELY(klass->IsProxyClass())) {
3615 return;
3616 }
Vladimir Marko86c87522020-05-11 16:55:55 +01003617 PointerSize pointer_size = image_pointer_size_;
3618 if (std::any_of(klass->GetDirectMethods(pointer_size).begin(),
3619 klass->GetDirectMethods(pointer_size).end(),
3620 [](const ArtMethod& m) { return m.IsCriticalNative(); })) {
3621 // Store registered @CriticalNative methods, if any, to JNI entrypoints.
3622 // Direct methods are a contiguous chunk of memory, so use the ordering of the map.
3623 ArtMethod* first_method = klass->GetDirectMethod(0u, pointer_size);
3624 ArtMethod* last_method = klass->GetDirectMethod(num_direct_methods - 1u, pointer_size);
3625 MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
3626 auto lb = critical_native_code_with_clinit_check_.lower_bound(first_method);
3627 while (lb != critical_native_code_with_clinit_check_.end() && lb->first <= last_method) {
3628 lb->first->SetEntryPointFromJni(lb->second);
3629 lb = critical_native_code_with_clinit_check_.erase(lb);
3630 }
3631 }
Ian Rogers62d6c772013-02-27 08:32:07 -08003632 Runtime* runtime = Runtime::Current();
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07003633 if (!runtime->IsStarted()) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08003634 if (runtime->IsAotCompiler() || runtime->GetHeap()->HasBootImageSpace()) {
Alex Light64ad14d2014-08-19 14:23:13 -07003635 return; // OAT file unavailable.
3636 }
Ian Rogers19846512012-02-24 11:42:47 -08003637 }
Alex Light64ad14d2014-08-19 14:23:13 -07003638
Mathieu Chartierf8322842014-05-16 10:59:25 -07003639 const DexFile& dex_file = klass->GetDexFile();
Ian Rogers97b52f82014-08-14 11:34:07 -07003640 bool has_oat_class;
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003641 OatFile::OatClass oat_class = OatFile::FindOatClass(dex_file,
3642 klass->GetDexClassDefIndex(),
3643 &has_oat_class);
Ian Rogers1c829822013-09-30 18:18:50 -07003644 // Link the code of methods skipped by LinkCode.
Vladimir Marko86c87522020-05-11 16:55:55 +01003645 for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
3646 ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003647 if (!method->IsStatic()) {
3648 // Only update static methods.
3649 continue;
Ian Rogers19846512012-02-24 11:42:47 -08003650 }
Nicolas Geoffray4fcdc942014-07-22 10:48:00 +01003651 const void* quick_code = nullptr;
Nicolas Geoffray00391822019-12-10 10:17:23 +00003652
3653 // In order:
3654 // 1) Check if we have AOT Code.
3655 // 2) Check if we have JIT Code.
3656 // 3) Check if we can use Nterp.
Nicolas Geoffray4fcdc942014-07-22 10:48:00 +01003657 if (has_oat_class) {
3658 OatFile::OatMethod oat_method = oat_class.GetOatMethod(method_index);
Nicolas Geoffray4fcdc942014-07-22 10:48:00 +01003659 quick_code = oat_method.GetQuickCode();
3660 }
Nicolas Geoffray00391822019-12-10 10:17:23 +00003661
Vladimir Markocce414f2019-10-07 08:51:33 +01003662 jit::Jit* jit = runtime->GetJit();
Nicolas Geoffray32384402019-07-17 20:06:44 +01003663 if (quick_code == nullptr && jit != nullptr) {
3664 quick_code = jit->GetCodeCache()->GetSavedEntryPointOfPreCompiledMethod(method);
Nicolas Geoffray7989ac92019-04-10 12:42:30 +01003665 }
Nicolas Geoffray00391822019-12-10 10:17:23 +00003666
3667 if (quick_code == nullptr &&
3668 interpreter::CanRuntimeUseNterp() &&
3669 interpreter::CanMethodUseNterp(method)) {
3670 quick_code = interpreter::GetNterpEntryPoint();
3671 }
3672
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003673 // Check whether the method is native, in which case it's generic JNI.
Ulyana Trafimovich3060bb92020-07-16 14:17:11 +00003674 if (quick_code == nullptr && method->IsNative()) {
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003675 quick_code = GetQuickGenericJniStub();
3676 } else if (ShouldUseInterpreterEntrypoint(method, quick_code)) {
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003677 // Use interpreter entry point.
Nicolas Geoffray00391822019-12-10 10:17:23 +00003678 if (IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode())) {
3679 // If we have the trampoline or the bridge already, no need to update.
3680 // This saves in not dirtying boot image memory.
3681 continue;
3682 }
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003683 quick_code = GetQuickToInterpreterBridge();
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003684 }
Nicolas Geoffray00391822019-12-10 10:17:23 +00003685 CHECK(quick_code != nullptr);
Elliott Hughes956af0f2014-12-11 14:34:28 -08003686 runtime->GetInstrumentation()->UpdateMethodsCode(method, quick_code);
Ian Rogers19846512012-02-24 11:42:47 -08003687 }
Ian Rogers62d6c772013-02-27 08:32:07 -08003688 // Ignore virtual methods on the iterator.
Ian Rogers19846512012-02-24 11:42:47 -08003689}
3690
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003691// Does anything needed to make sure that the compiler will not generate a direct invoke to this
3692// method. Should only be called on non-invokable methods.
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +00003693inline void EnsureThrowsInvocationError(ClassLinker* class_linker, ArtMethod* method)
3694 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light9139e002015-10-09 15:59:48 -07003695 DCHECK(method != nullptr);
3696 DCHECK(!method->IsInvokable());
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003697 method->SetEntryPointFromQuickCompiledCodePtrSize(
3698 class_linker->GetQuickToInterpreterBridgeTrampoline(),
3699 class_linker->GetImagePointerSize());
Alex Light9139e002015-10-09 15:59:48 -07003700}
3701
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003702static void LinkCode(ClassLinker* class_linker,
3703 ArtMethod* method,
3704 const OatFile::OatClass* oat_class,
3705 uint32_t class_def_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light2d441b12018-06-08 15:33:21 -07003706 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003707 Runtime* const runtime = Runtime::Current();
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08003708 if (runtime->IsAotCompiler()) {
Nicolas Geoffray4fcdc942014-07-22 10:48:00 +01003709 // The following code only applies to a non-compiler runtime.
3710 return;
3711 }
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003712
Ian Rogers62d6c772013-02-27 08:32:07 -08003713 // Method shouldn't have already been linked.
Ian Rogersef7d42f2014-01-06 12:55:46 -08003714 DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr);
Jeff Hao16743632013-05-08 10:59:04 -07003715
Alex Light9139e002015-10-09 15:59:48 -07003716 if (!method->IsInvokable()) {
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003717 EnsureThrowsInvocationError(class_linker, method);
Brian Carlstrom92827a52011-10-10 15:50:01 -07003718 return;
3719 }
Ian Rogers19846512012-02-24 11:42:47 -08003720
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003721 const void* quick_code = nullptr;
3722 if (oat_class != nullptr) {
3723 // Every kind of method should at least get an invoke stub from the oat_method.
3724    // Non-abstract methods also get their code pointers.
3725 const OatFile::OatMethod oat_method = oat_class->GetOatMethod(class_def_method_index);
3726 quick_code = oat_method.GetQuickCode();
3727 }
3728
3729 bool enter_interpreter = class_linker->ShouldUseInterpreterEntrypoint(method, quick_code);
3730
3731 // Note: this mimics the logic in image_writer.cc that installs the resolution
3732 // stub only if we have compiled code and the method needs a class initialization
3733 // check.
Ulyana Trafimovich3060bb92020-07-16 14:17:11 +00003734 if (quick_code == nullptr) {
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003735 method->SetEntryPointFromQuickCompiledCode(
3736 method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
3737 } else if (enter_interpreter) {
3738 method->SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
Vladimir Marko5115a4d2019-10-17 14:56:47 +01003739 } else if (NeedsClinitCheckBeforeCall(method)) {
3740    DCHECK(!method->GetDeclaringClass()->IsVisiblyInitialized());  // Actually ClassStatus::kIdx.
3741 // If we do have code but the method needs a class initialization check before calling
3742 // that code, install the resolution stub that will perform the check.
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003743 // It will be replaced by the proper entry point by ClassLinker::FixupStaticTrampolines
3744    // after the class is initialized (see ClassLinker::InitializeClass).
Ian Rogers6f3dbba2014-10-14 17:41:57 -07003745 method->SetEntryPointFromQuickCompiledCode(GetQuickResolutionStub());
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003746 } else {
3747 method->SetEntryPointFromQuickCompiledCode(quick_code);
Ian Rogers0d6de042012-02-29 08:50:26 -08003748 }
jeffhao26c0a1a2012-01-17 16:28:33 -08003749
Ian Rogers62d6c772013-02-27 08:32:07 -08003750 if (method->IsNative()) {
Vladimir Marko86c87522020-05-11 16:55:55 +01003751 // Set up the dlsym lookup stub. Do not go through `UnregisterNative()`
3752 // as the extra processing for @CriticalNative is not needed yet.
3753 method->SetEntryPointFromJni(
3754 method->IsCriticalNative() ? GetJniDlsymLookupCriticalStub() : GetJniDlsymLookupStub());
Andreas Gampe90546832014-03-12 18:07:19 -07003755
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003756 if (enter_interpreter || quick_code == nullptr) {
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003757      // We have a native method here without code, so it should have the generic JNI
3758      // trampoline as its entrypoint.
Ian Rogers6f3dbba2014-10-14 17:41:57 -07003759 // TODO: this doesn't handle all the cases where trampolines may be installed.
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003760 DCHECK(class_linker->IsQuickGenericJniStub(method->GetEntryPointFromQuickCompiledCode()));
Andreas Gampe90546832014-03-12 18:07:19 -07003761 }
Brian Carlstrom92827a52011-10-10 15:50:01 -07003762 }
3763}
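// Editor's sketch: a minimal, self-contained restatement of the entrypoint
// selection order implemented by LinkCode above. EntryPoint, MethodInfo and
// SelectEntryPoint are hypothetical names introduced here for illustration;
// they are not part of ART.
#include <cstdint>

namespace sketch {

enum class EntryPoint {
  kGenericJniStub,             // Native method without compiled code.
  kQuickToInterpreterBridge,   // Interpreted execution.
  kQuickResolutionStub,        // Compiled code that still needs a clinit check.
  kCompiledCode,               // Use the oat-compiled code directly.
};

struct MethodInfo {
  bool has_quick_code;      // The OatClass provided a code pointer.
  bool is_native;
  bool force_interpreter;   // E.g. ShouldUseInterpreterEntrypoint() returned true.
  bool needs_clinit_check;  // Static method in a not-visibly-initialized class.
};

// Mirrors the if/else chain in LinkCode; the order matters because each later
// branch assumes the earlier conditions were false.
inline EntryPoint SelectEntryPoint(const MethodInfo& m) {
  if (!m.has_quick_code) {
    return m.is_native ? EntryPoint::kGenericJniStub
                       : EntryPoint::kQuickToInterpreterBridge;
  }
  if (m.force_interpreter) {
    return EntryPoint::kQuickToInterpreterBridge;
  }
  if (m.needs_clinit_check) {
    // Replaced by the real code once the class becomes initialized
    // (see FixupStaticTrampolines).
    return EntryPoint::kQuickResolutionStub;
  }
  return EntryPoint::kCompiledCode;
}

}  // namespace sketch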
3764
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003765void ClassLinker::SetupClass(const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003766 const dex::ClassDef& dex_class_def,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003767 Handle<mirror::Class> klass,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003768 ObjPtr<mirror::ClassLoader> class_loader) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08003769 CHECK(klass != nullptr);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07003770 CHECK(klass->GetDexCache() != nullptr);
Vladimir Marko2c64a832018-01-04 11:31:56 +00003771 CHECK_EQ(ClassStatus::kNotReady, klass->GetStatus());
Brian Carlstromf615a612011-07-23 12:50:34 -07003772 const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07003773 CHECK(descriptor != nullptr);
Brian Carlstrom934486c2011-07-12 23:42:50 -07003774
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003775 klass->SetClass(GetClassRoot<mirror::Class>(this));
Andreas Gampe51829322014-08-25 15:05:04 -07003776 uint32_t access_flags = dex_class_def.GetJavaAccessFlags();
Brian Carlstrom8e3fb142013-10-09 21:00:27 -07003777 CHECK_EQ(access_flags & ~kAccJavaFlagsMask, 0U);
Vladimir Markob68bb7a2020-03-17 10:55:25 +00003778 klass->SetAccessFlagsDuringLinking(access_flags);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07003779 klass->SetClassLoader(class_loader);
Ian Rogersc2b44472011-12-14 21:17:17 -08003780 DCHECK_EQ(klass->GetPrimitiveType(), Primitive::kPrimNot);
Vladimir Marko2c64a832018-01-04 11:31:56 +00003781 mirror::Class::SetStatus(klass, ClassStatus::kIdx, nullptr);
Brian Carlstrom934486c2011-07-12 23:42:50 -07003782
Ian Rogers8b2c0b92013-09-19 02:56:49 -07003783 klass->SetDexClassDefIndex(dex_file.GetIndexForClassDef(dex_class_def));
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08003784 klass->SetDexTypeIndex(dex_class_def.class_idx_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07003785}
Brian Carlstrom934486c2011-07-12 23:42:50 -07003786
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003787LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
3788 LinearAlloc* allocator,
3789 size_t length) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003790 if (length == 0) {
3791 return nullptr;
3792 }
Vladimir Markocf36d492015-08-12 19:27:26 +01003793 // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
3794 static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
3795 size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003796 void* array_storage = allocator->Alloc(self, storage_size);
Vladimir Markocf36d492015-08-12 19:27:26 +01003797 auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003798 CHECK(ret != nullptr);
3799 std::uninitialized_fill_n(&ret->At(0), length, ArtField());
3800 return ret;
Mathieu Chartierc7853442015-03-27 14:35:38 -07003801}
3802
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003803LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
3804 LinearAlloc* allocator,
3805 size_t length) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003806 if (length == 0) {
3807 return nullptr;
Mathieu Chartiere401d142015-04-22 13:56:20 -07003808 }
Vladimir Marko14632852015-08-17 12:07:23 +01003809 const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
3810 const size_t method_size = ArtMethod::Size(image_pointer_size_);
Vladimir Markocf36d492015-08-12 19:27:26 +01003811 const size_t storage_size =
3812 LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003813 void* array_storage = allocator->Alloc(self, storage_size);
Vladimir Markocf36d492015-08-12 19:27:26 +01003814 auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003815 CHECK(ret != nullptr);
3816 for (size_t i = 0; i < length; ++i) {
Vladimir Markocf36d492015-08-12 19:27:26 +01003817 new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003818 }
3819 return ret;
Mathieu Chartiere401d142015-04-22 13:56:20 -07003820}
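// Editor's sketch of the "length-prefixed array" allocation pattern used by
// AllocArtFieldArray/AllocArtMethodArray above: one allocation holds a length
// header followed by the elements, which are constructed with placement new.
// LengthPrefixed is a toy stand-in, not ART's LengthPrefixedArray, and it
// ignores the per-element size/alignment handling the real class performs.
#include <cstddef>
#include <cstdlib>
#include <new>

template <typename T>
struct LengthPrefixed {
  std::size_t length;
  // Elements live directly after the header in the same allocation.
  T* At(std::size_t i) { return reinterpret_cast<T*>(this + 1) + i; }

  static LengthPrefixed<T>* Alloc(std::size_t length) {
    if (length == 0) {
      return nullptr;  // Mirrors the early return in the allocators above.
    }
    const std::size_t storage = sizeof(LengthPrefixed<T>) + length * sizeof(T);
    void* raw = std::malloc(storage);  // ART allocates from a LinearAlloc instead.
    auto* array = new (raw) LengthPrefixed<T>{length};
    for (std::size_t i = 0; i < length; ++i) {
      new (array->At(i)) T();  // Placement-new each element in place.
    }
    return array;
  }
};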
3821
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003822LinearAlloc* ClassLinker::GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003823 if (class_loader == nullptr) {
3824 return Runtime::Current()->GetLinearAlloc();
3825 }
3826 LinearAlloc* allocator = class_loader->GetAllocator();
3827 DCHECK(allocator != nullptr);
3828 return allocator;
3829}
3830
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003831LinearAlloc* ClassLinker::GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartierd57d4542015-10-14 10:55:30 -07003832 if (class_loader == nullptr) {
3833 return Runtime::Current()->GetLinearAlloc();
3834 }
3835 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
3836 LinearAlloc* allocator = class_loader->GetAllocator();
3837 if (allocator == nullptr) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08003838 RegisterClassLoader(class_loader);
3839 allocator = class_loader->GetAllocator();
3840 CHECK(allocator != nullptr);
Mathieu Chartierd57d4542015-10-14 10:55:30 -07003841 }
3842 return allocator;
3843}
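// Editor's sketch of the get-or-create pattern used by the two allocator
// accessors above: a null class loader maps to the runtime-wide allocator,
// while a per-loader allocator is created lazily under the lock. Allocator,
// LoaderSketch and GetOrCreateAllocator are hypothetical simplifications.
#include <memory>
#include <mutex>

struct Allocator {};

struct LoaderSketch {
  std::unique_ptr<Allocator> allocator;
};

inline Allocator* GetOrCreateAllocator(LoaderSketch* loader,
                                       Allocator* runtime_allocator,
                                       std::mutex& classes_lock) {
  if (loader == nullptr) {
    return runtime_allocator;  // Boot class loader case.
  }
  std::lock_guard<std::mutex> lock(classes_lock);
  if (loader->allocator == nullptr) {
    loader->allocator = std::make_unique<Allocator>();  // Created on first use.
  }
  return loader->allocator.get();
}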
3844
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003845void ClassLinker::LoadClass(Thread* self,
3846 const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003847 const dex::ClassDef& dex_class_def,
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003848 Handle<mirror::Class> klass) {
David Brazdil20c765f2018-10-27 21:45:15 +00003849 ClassAccessor accessor(dex_file,
3850 dex_class_def,
3851 /* parse_hiddenapi_class_data= */ klass->IsBootStrapClassLoaded());
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003852 if (!accessor.HasClassData()) {
3853 return;
3854 }
3855 Runtime* const runtime = Runtime::Current();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003856 {
3857    // Note: We cannot have thread suspension until the field and method arrays are set up, or else
3858 // Class::VisitFieldRoots may miss some fields or methods.
Mathieu Chartier268764d2016-09-13 12:09:38 -07003859 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003860 // Load static fields.
Vladimir Marko23682bf2015-06-24 14:28:03 +01003861 // We allow duplicate definitions of the same field in a class_data_item
3862 // but ignore the repeated indexes here, b/21868015.
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003863 LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003864 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
3865 allocator,
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003866 accessor.NumStaticFields());
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003867 LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
3868 allocator,
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003869 accessor.NumInstanceFields());
3870 size_t num_sfields = 0u;
Vladimir Marko23682bf2015-06-24 14:28:03 +01003871 size_t num_ifields = 0u;
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003872 uint32_t last_static_field_idx = 0u;
3873 uint32_t last_instance_field_idx = 0u;
Orion Hodsonc069a302017-01-18 09:23:12 +00003874
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003875 // Methods
3876 bool has_oat_class = false;
3877 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
3878 ? OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class)
3879 : OatFile::OatClass::Invalid();
3880 const OatFile::OatClass* oat_class_ptr = has_oat_class ? &oat_class : nullptr;
3881 klass->SetMethodsPtr(
3882 AllocArtMethodArray(self, allocator, accessor.NumMethods()),
3883 accessor.NumDirectMethods(),
3884 accessor.NumVirtualMethods());
3885 size_t class_def_method_index = 0;
3886 uint32_t last_dex_method_index = dex::kDexNoIndex;
3887 size_t last_class_def_method_index = 0;
3888
3889    // Use the visitor since the range-based loops are a bit slower due to seeking. Seeking to the
3890    // methods requires decoding all of the fields.
3891 accessor.VisitFieldsAndMethods([&](
3892 const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3893 uint32_t field_idx = field.GetIndex();
3894 DCHECK_GE(field_idx, last_static_field_idx); // Ordering enforced by DexFileVerifier.
3895 if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) {
3896 LoadField(field, klass, &sfields->At(num_sfields));
3897 ++num_sfields;
3898 last_static_field_idx = field_idx;
3899 }
3900 }, [&](const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3901 uint32_t field_idx = field.GetIndex();
3902 DCHECK_GE(field_idx, last_instance_field_idx); // Ordering enforced by DexFileVerifier.
3903 if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) {
3904 LoadField(field, klass, &ifields->At(num_ifields));
3905 ++num_ifields;
3906 last_instance_field_idx = field_idx;
3907 }
3908 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3909 ArtMethod* art_method = klass->GetDirectMethodUnchecked(class_def_method_index,
3910 image_pointer_size_);
3911 LoadMethod(dex_file, method, klass, art_method);
3912 LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3913 uint32_t it_method_index = method.GetIndex();
3914 if (last_dex_method_index == it_method_index) {
3915        // Duplicate method index: reuse the previously assigned class def method index.
3916 art_method->SetMethodIndex(last_class_def_method_index);
3917 } else {
3918 art_method->SetMethodIndex(class_def_method_index);
3919 last_dex_method_index = it_method_index;
3920 last_class_def_method_index = class_def_method_index;
3921 }
3922 ++class_def_method_index;
3923 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3924 ArtMethod* art_method = klass->GetVirtualMethodUnchecked(
3925 class_def_method_index - accessor.NumDirectMethods(),
3926 image_pointer_size_);
3927 LoadMethod(dex_file, method, klass, art_method);
3928 LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3929 ++class_def_method_index;
3930 });
3931
3932 if (UNLIKELY(num_ifields + num_sfields != accessor.NumFields())) {
David Sehr709b0702016-10-13 09:12:37 -07003933 LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor()
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003934 << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields()
3935 << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields()
3936 << ")";
Vladimir Marko81819db2015-11-05 15:30:12 +00003937 // NOTE: Not shrinking the over-allocated sfields/ifields, just setting size.
3938 if (sfields != nullptr) {
3939 sfields->SetSize(num_sfields);
3940 }
3941 if (ifields != nullptr) {
3942 ifields->SetSize(num_ifields);
3943 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07003944 }
Vladimir Marko81819db2015-11-05 15:30:12 +00003945 // Set the field arrays.
3946 klass->SetSFieldsPtr(sfields);
3947 DCHECK_EQ(klass->NumStaticFields(), num_sfields);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003948 klass->SetIFieldsPtr(ifields);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003949 DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
Ian Rogers0571d352011-11-03 19:51:38 -07003950 }
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07003951 // Ensure that the card is marked so that remembered sets pick up native roots.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07003952 WriteBarrier::ForEveryFieldWrite(klass.Get());
Mathieu Chartierf3f2a7a2015-04-14 15:43:10 -07003953 self->AllowThreadSuspension();
Brian Carlstrom934486c2011-07-12 23:42:50 -07003954}
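// Editor's sketch of the duplicate-field filtering performed in LoadClass
// above: field indexes arrive in non-decreasing order (the DexFileVerifier
// enforces this), and a repeated index is loaded only once (b/21868015).
// RawField and FilterDuplicateFields are illustrative names, not ART types.
#include <cstdint>
#include <vector>

struct RawField { uint32_t index; };

// Returns the field indexes that would actually be loaded.
inline std::vector<uint32_t> FilterDuplicateFields(const std::vector<RawField>& fields) {
  std::vector<uint32_t> loaded;
  uint32_t last_idx = 0u;
  for (const RawField& f : fields) {
    // First field, or a strictly larger index: load it.
    if (loaded.empty() || f.index > last_idx) {
      loaded.push_back(f.index);
      last_idx = f.index;
    }
    // Otherwise this is a duplicate definition and is ignored.
  }
  return loaded;
}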
3955
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003956void ClassLinker::LoadField(const ClassAccessor::Field& field,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003957 Handle<mirror::Class> klass,
Mathieu Chartierc7853442015-03-27 14:35:38 -07003958 ArtField* dst) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003959 const uint32_t field_idx = field.GetIndex();
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08003960 dst->SetDexFieldIndex(field_idx);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07003961 dst->SetDeclaringClass(klass.Get());
David Brazdilf6a8a552018-01-15 18:10:50 +00003962
David Brazdil85865692018-10-30 17:26:20 +00003963 // Get access flags from the DexFile and set hiddenapi runtime access flags.
3964 dst->SetAccessFlags(field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field));
Brian Carlstrom934486c2011-07-12 23:42:50 -07003965}
3966
Mathieu Chartier268764d2016-09-13 12:09:38 -07003967void ClassLinker::LoadMethod(const DexFile& dex_file,
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003968 const ClassAccessor::Method& method,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003969 Handle<mirror::Class> klass,
3970 ArtMethod* dst) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003971 const uint32_t dex_method_idx = method.GetIndex();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003972 const dex::MethodId& method_id = dex_file.GetMethodId(dex_method_idx);
Ian Rogersdfb325e2013-10-30 01:00:44 -07003973 const char* method_name = dex_file.StringDataByIdx(method_id.name_idx_);
Mathieu Chartier66f19252012-09-18 08:57:04 -07003974
Mathieu Chartier268764d2016-09-13 12:09:38 -07003975 ScopedAssertNoThreadSuspension ants("LoadMethod");
Mathieu Chartier66f19252012-09-18 08:57:04 -07003976 dst->SetDexMethodIndex(dex_method_idx);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07003977 dst->SetDeclaringClass(klass.Get());
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003978 dst->SetCodeItemOffset(method.GetCodeItemOffset());
Brian Carlstrom934486c2011-07-12 23:42:50 -07003979
David Brazdil85865692018-10-30 17:26:20 +00003980 // Get access flags from the DexFile and set hiddenapi runtime access flags.
3981 uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method);
David Brazdilf6a8a552018-01-15 18:10:50 +00003982
Ian Rogersdfb325e2013-10-30 01:00:44 -07003983 if (UNLIKELY(strcmp("finalize", method_name) == 0)) {
Ian Rogers241b5de2013-10-09 17:58:57 -07003984 // Set finalizable flag on declaring class.
Ian Rogersdfb325e2013-10-30 01:00:44 -07003985 if (strcmp("V", dex_file.GetShorty(method_id.proto_idx_)) == 0) {
3986 // Void return type.
Andreas Gampe2ed8def2014-08-28 14:41:02 -07003987 if (klass->GetClassLoader() != nullptr) { // All non-boot finalizer methods are flagged.
Ian Rogersdfb325e2013-10-30 01:00:44 -07003988 klass->SetFinalizable();
3989 } else {
Ian Rogers1ff3c982014-08-12 02:30:58 -07003990 std::string temp;
3991 const char* klass_descriptor = klass->GetDescriptor(&temp);
Ian Rogersdfb325e2013-10-30 01:00:44 -07003992 // The Enum class declares a "final" finalize() method to prevent subclasses from
3993 // introducing a finalizer. We don't want to set the finalizable flag for Enum or its
3994 // subclasses, so we exclude it here.
3995 // We also want to avoid setting the flag on Object, where we know that finalize() is
3996 // empty.
Ian Rogers1ff3c982014-08-12 02:30:58 -07003997 if (strcmp(klass_descriptor, "Ljava/lang/Object;") != 0 &&
3998 strcmp(klass_descriptor, "Ljava/lang/Enum;") != 0) {
Ian Rogers241b5de2013-10-09 17:58:57 -07003999 klass->SetFinalizable();
Ian Rogers241b5de2013-10-09 17:58:57 -07004000 }
4001 }
4002 }
4003 } else if (method_name[0] == '<') {
4004 // Fix broken access flags for initializers. Bug 11157540.
Ian Rogersdfb325e2013-10-30 01:00:44 -07004005 bool is_init = (strcmp("<init>", method_name) == 0);
4006 bool is_clinit = !is_init && (strcmp("<clinit>", method_name) == 0);
Ian Rogers241b5de2013-10-09 17:58:57 -07004007 if (UNLIKELY(!is_init && !is_clinit)) {
4008 LOG(WARNING) << "Unexpected '<' at start of method name " << method_name;
4009 } else {
4010 if (UNLIKELY((access_flags & kAccConstructor) == 0)) {
4011 LOG(WARNING) << method_name << " didn't have expected constructor access flag in class "
David Sehr709b0702016-10-13 09:12:37 -07004012 << klass->PrettyDescriptor() << " in dex file " << dex_file.GetLocation();
Ian Rogers241b5de2013-10-09 17:58:57 -07004013 access_flags |= kAccConstructor;
4014 }
4015 }
4016 }
Vladimir Markob0a6aee2017-10-27 10:34:04 +01004017 if (UNLIKELY((access_flags & kAccNative) != 0u)) {
4018 // Check if the native method is annotated with @FastNative or @CriticalNative.
4019 access_flags |= annotations::GetNativeMethodAnnotationAccessFlags(
4020 dex_file, dst->GetClassDef(), dex_method_idx);
4021 }
Ian Rogers241b5de2013-10-09 17:58:57 -07004022 dst->SetAccessFlags(access_flags);
David Srbeckye36e7f22018-11-14 14:21:23 +00004023 // Must be done after SetAccessFlags since IsAbstract depends on it.
4024 if (klass->IsInterface() && dst->IsAbstract()) {
4025 dst->CalculateAndSetImtIndex();
4026 }
Brian Carlstrom934486c2011-07-12 23:42:50 -07004027}
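// Editor's sketch of the finalizer rule applied in LoadMethod above: a method
// named "finalize" with a ()V signature marks its class finalizable, except
// for java.lang.Object and java.lang.Enum on the boot class path. The helper
// below is an illustrative simplification, not ART code.
#include <cstring>

inline bool MarksClassFinalizable(const char* method_name,
                                  const char* shorty,          // "V" means ()V.
                                  bool has_user_class_loader,  // Non-boot class loader.
                                  const char* class_descriptor) {
  if (std::strcmp(method_name, "finalize") != 0 || std::strcmp(shorty, "V") != 0) {
    return false;
  }
  if (has_user_class_loader) {
    return true;  // All non-boot finalize()V methods flag the declaring class.
  }
  // On the boot class path, Object's finalize() is empty and Enum's exists only
  // to block subclasses from adding one, so neither marks its class.
  return std::strcmp(class_descriptor, "Ljava/lang/Object;") != 0 &&
         std::strcmp(class_descriptor, "Ljava/lang/Enum;") != 0;
}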
4028
Mathieu Chartier0a19e212019-11-27 14:35:24 -08004029void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) {
Vladimir Markocd556b02017-02-03 11:47:34 +00004030 ObjPtr<mirror::DexCache> dex_cache = AllocAndInitializeDexCache(
Mathieu Chartierd57d4542015-10-14 10:55:30 -07004031 self,
Mathieu Chartier0a19e212019-11-27 14:35:24 -08004032 *dex_file,
Vladimir Markocd556b02017-02-03 11:47:34 +00004033 Runtime::Current()->GetLinearAlloc());
Mathieu Chartier0a19e212019-11-27 14:35:24 -08004034 CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation();
Brian Carlstrom40381fb2011-10-19 14:13:40 -07004035 AppendToBootClassPath(dex_file, dex_cache);
Brian Carlstroma663ea52011-08-19 23:33:41 -07004036}
4037
Mathieu Chartier0a19e212019-11-27 14:35:24 -08004038void ClassLinker::AppendToBootClassPath(const DexFile* dex_file,
Vladimir Markocd556b02017-02-03 11:47:34 +00004039 ObjPtr<mirror::DexCache> dex_cache) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -08004040 CHECK(dex_file != nullptr);
4041 CHECK(dex_cache != nullptr) << dex_file->GetLocation();
4042 boot_class_path_.push_back(dex_file);
Andreas Gampebe7af222017-07-25 09:57:28 -07004043 WriterMutexLock mu(Thread::Current(), *Locks::dex_lock_);
Mathieu Chartier0a19e212019-11-27 14:35:24 -08004044 RegisterDexFileLocked(*dex_file, dex_cache, /* class_loader= */ nullptr);
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07004045}
4046
Mathieu Chartierc528dba2013-11-26 12:00:11 -08004047void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
Vladimir Markocd556b02017-02-03 11:47:34 +00004048 ObjPtr<mirror::DexCache> dex_cache,
4049 ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004050 Thread* const self = Thread::Current();
Andreas Gampecc1b5352016-12-01 16:58:38 -08004051 Locks::dex_lock_->AssertExclusiveHeld(self);
Vladimir Markocd556b02017-02-03 11:47:34 +00004052 CHECK(dex_cache != nullptr) << dex_file.GetLocation();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08004053 // For app images, the dex cache location may be a suffix of the dex file location since the
4054 // dex file location is an absolute path.
Mathieu Chartier76172162016-01-26 14:54:06 -08004055 const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
4056 const size_t dex_cache_length = dex_cache_location.length();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08004057 CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
4058 std::string dex_file_location = dex_file.GetLocation();
Nicolas Geoffraye3e0f702019-03-12 07:02:02 +00004059  // The following path checks don't work on preopt when using boot dex files, where the dex
4060 // cache location is the one on device, and the dex_file's location is the one on host.
4061 if (!(Runtime::Current()->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
4062 CHECK_GE(dex_file_location.length(), dex_cache_length)
4063 << dex_cache_location << " " << dex_file.GetLocation();
4064 const std::string dex_file_suffix = dex_file_location.substr(
4065 dex_file_location.length() - dex_cache_length,
4066 dex_cache_length);
4067 // Example dex_cache location is SettingsProvider.apk and
4068 // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
4069 CHECK_EQ(dex_cache_location, dex_file_suffix);
4070 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004071 const OatFile* oat_file =
4072 (dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
Vladimir Markob066d432018-01-03 13:14:37 +00004073 // Clean up pass to remove null dex caches; null dex caches can occur due to class unloading
4074 // and we are lazily removing null entries. Also check if we need to initialize OatFile data
4075 // (.data.bimg.rel.ro and .bss sections) needed for code execution.
4076 bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
Ian Rogers55256cb2017-12-21 17:07:11 -08004077 JavaVMExt* const vm = self->GetJniEnv()->GetVm();
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08004078 for (auto it = dex_caches_.begin(); it != dex_caches_.end(); ) {
4079 DexCacheData data = *it;
4080 if (self->IsJWeakCleared(data.weak_root)) {
4081 vm->DeleteWeakGlobalRef(self, data.weak_root);
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004082 it = dex_caches_.erase(it);
4083 } else {
Vladimir Markob066d432018-01-03 13:14:37 +00004084 if (initialize_oat_file_data &&
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004085 it->dex_file->GetOatDexFile() != nullptr &&
4086 it->dex_file->GetOatDexFile()->GetOatFile() == oat_file) {
Vladimir Markob066d432018-01-03 13:14:37 +00004087 initialize_oat_file_data = false; // Already initialized.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004088 }
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004089 ++it;
4090 }
Brian Carlstrom81a90872015-08-28 09:07:14 -07004091 }
Vladimir Markob066d432018-01-03 13:14:37 +00004092 if (initialize_oat_file_data) {
Vladimir Marko1cedb4a2019-02-06 14:13:28 +00004093 oat_file->InitializeRelocations();
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004094 }
David Brazdila5c3a802019-03-08 14:59:41 +00004095 // Let hiddenapi assign a domain to the newly registered dex file.
4096 hiddenapi::InitializeDexFileDomain(dex_file, class_loader);
4097
Vladimir Markocd556b02017-02-03 11:47:34 +00004098 jweak dex_cache_jweak = vm->AddWeakGlobalRef(self, dex_cache);
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004099 dex_cache->SetDexFile(&dex_file);
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08004100 DexCacheData data;
4101 data.weak_root = dex_cache_jweak;
4102 data.dex_file = dex_cache->GetDexFile();
Vladimir Markocd556b02017-02-03 11:47:34 +00004103 data.class_table = ClassTableForClassLoader(class_loader);
David Srbeckyafc60cd2018-12-05 11:59:31 +00004104 AddNativeDebugInfoForDex(self, data.dex_file);
Vladimir Markocd556b02017-02-03 11:47:34 +00004105 DCHECK(data.class_table != nullptr);
Mathieu Chartier72041a02017-07-14 18:23:25 -07004106 // Make sure to hold the dex cache live in the class table. This case happens for the boot class
4107 // path dex caches without an image.
4108 data.class_table->InsertStrongRoot(dex_cache);
Andreas Gampe8a1a0f72020-03-03 16:07:45 -08004109 // Make sure that the dex cache holds the classloader live.
4110 dex_cache->SetClassLoader(class_loader);
Mathieu Chartier72041a02017-07-14 18:23:25 -07004111 if (class_loader != nullptr) {
4112 // Since we added a strong root to the class table, do the write barrier as required for
4113 // remembered sets and generational GCs.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004114 WriteBarrier::ForEveryFieldWrite(class_loader);
Mathieu Chartier72041a02017-07-14 18:23:25 -07004115 }
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08004116 dex_caches_.push_back(data);
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07004117}
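// Editor's sketch of the location check in RegisterDexFileLocked above: for
// app images the dex cache stores only a suffix (e.g. the APK name) of the
// absolute dex file path, so registration verifies that the dex file location
// ends with the dex cache location. DexCacheLocationMatches is a hypothetical
// helper written only to illustrate that check.
#include <string>

inline bool DexCacheLocationMatches(const std::string& dex_file_location,
                                    const std::string& dex_cache_location) {
  if (dex_cache_location.empty() ||
      dex_file_location.length() < dex_cache_location.length()) {
    return false;
  }
  // Example: cache location "SettingsProvider.apk" matches dex file location
  // "/system/priv-app/SettingsProvider/SettingsProvider.apk".
  return dex_file_location.compare(
             dex_file_location.length() - dex_cache_location.length(),
             dex_cache_location.length(),
             dex_cache_location) == 0;
}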
4118
Alex Light725da8f2020-02-19 14:46:33 -08004119ObjPtr<mirror::DexCache> ClassLinker::DecodeDexCacheLocked(Thread* self, const DexCacheData* data) {
4120 return data != nullptr
4121 ? ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data->weak_root))
Vladimir Markocd556b02017-02-03 11:47:34 +00004122 : nullptr;
4123}
4124
Alex Light725da8f2020-02-19 14:46:33 -08004125bool ClassLinker::IsSameClassLoader(
Vladimir Markocd556b02017-02-03 11:47:34 +00004126 ObjPtr<mirror::DexCache> dex_cache,
Alex Light725da8f2020-02-19 14:46:33 -08004127 const DexCacheData* data,
Vladimir Markocd556b02017-02-03 11:47:34 +00004128 ObjPtr<mirror::ClassLoader> class_loader) {
Alex Light725da8f2020-02-19 14:46:33 -08004129 CHECK(data != nullptr);
4130 DCHECK_EQ(dex_cache->GetDexFile(), data->dex_file);
4131 return data->class_table == ClassTableForClassLoader(class_loader);
Vladimir Markocd556b02017-02-03 11:47:34 +00004132}
4133
Alex Light07f06212017-06-01 14:01:43 -07004134void ClassLinker::RegisterExistingDexCache(ObjPtr<mirror::DexCache> dex_cache,
4135 ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartiered4ee442018-06-05 14:23:35 -07004136 SCOPED_TRACE << __FUNCTION__ << " " << dex_cache->GetDexFile()->GetLocation();
Alex Light07f06212017-06-01 14:01:43 -07004137 Thread* self = Thread::Current();
4138 StackHandleScope<2> hs(self);
4139 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
4140 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
4141 const DexFile* dex_file = dex_cache->GetDexFile();
4142 DCHECK(dex_file != nullptr) << "Attempt to register uninitialized dex_cache object!";
4143 if (kIsDebugBuild) {
Alex Light725da8f2020-02-19 14:46:33 -08004144 ReaderMutexLock mu(self, *Locks::dex_lock_);
4145 const DexCacheData* old_data = FindDexCacheDataLocked(*dex_file);
4146 ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCacheLocked(self, old_data);
Alex Light07f06212017-06-01 14:01:43 -07004147    DCHECK(old_dex_cache.IsNull()) << "Attempt to manually register a dex cache that's already "
4148 << "been registered on dex file " << dex_file->GetLocation();
4149 }
4150 ClassTable* table;
4151 {
4152 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4153 table = InsertClassTableForClassLoader(h_class_loader.Get());
4154 }
Ivan Maidanski2b69b9c2018-05-14 13:50:48 +03004155 // Avoid a deadlock between a garbage collecting thread running a checkpoint,
4156 // a thread holding the dex lock and blocking on a condition variable regarding
4157 // weak references access, and a thread blocking on the dex lock.
Ivan Maidanski2b69b9c2018-05-14 13:50:48 +03004158 gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
Alex Light07f06212017-06-01 14:01:43 -07004159 WriterMutexLock mu(self, *Locks::dex_lock_);
4160 RegisterDexFileLocked(*dex_file, h_dex_cache.Get(), h_class_loader.Get());
4161 table->InsertStrongRoot(h_dex_cache.Get());
4162 if (h_class_loader.Get() != nullptr) {
4163 // Since we added a strong root to the class table, do the write barrier as required for
4164 // remembered sets and generational GCs.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004165 WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
Alex Light07f06212017-06-01 14:01:43 -07004166 }
4167}
4168
Alex Lightde7f8782020-02-24 10:14:22 -08004169static void ThrowDexFileAlreadyRegisteredError(Thread* self, const DexFile& dex_file)
4170 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light725da8f2020-02-19 14:46:33 -08004171 self->ThrowNewExceptionF("Ljava/lang/InternalError;",
Alex Lightde7f8782020-02-24 10:14:22 -08004172 "Attempt to register dex file %s with multiple class loaders",
4173 dex_file.GetLocation().c_str());
Alex Light725da8f2020-02-19 14:46:33 -08004174}
4175
Vladimir Markocd556b02017-02-03 11:47:34 +00004176ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
4177 ObjPtr<mirror::ClassLoader> class_loader) {
Ian Rogers1f539342012-10-03 21:09:42 -07004178 Thread* self = Thread::Current();
Alex Light725da8f2020-02-19 14:46:33 -08004179 ObjPtr<mirror::DexCache> old_dex_cache;
4180 bool registered_with_another_class_loader = false;
Brian Carlstrom47d237a2011-10-18 15:08:33 -07004181 {
Andreas Gampecc1b5352016-12-01 16:58:38 -08004182 ReaderMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004183 const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
4184 old_dex_cache = DecodeDexCacheLocked(self, old_data);
4185 if (old_dex_cache != nullptr) {
4186 if (IsSameClassLoader(old_dex_cache, old_data, class_loader)) {
4187 return old_dex_cache;
4188 } else {
4189        // TODO: This is not very clean. We should maybe add a way to request that exceptions be
4190        // thrown when it is safe to do so, to simplify this.
4191 registered_with_another_class_loader = true;
4192 }
4193 }
Vladimir Markocd556b02017-02-03 11:47:34 +00004194 }
Alex Light725da8f2020-02-19 14:46:33 -08004195 // We need to have released the dex_lock_ to allocate safely.
4196 if (registered_with_another_class_loader) {
4197 ThrowDexFileAlreadyRegisteredError(self, dex_file);
4198 return nullptr;
Brian Carlstromaded5f72011-10-07 17:15:04 -07004199 }
Mathieu Chartiered4ee442018-06-05 14:23:35 -07004200 SCOPED_TRACE << __FUNCTION__ << " " << dex_file.GetLocation();
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004201 LinearAlloc* const linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader);
4202 DCHECK(linear_alloc != nullptr);
4203 ClassTable* table;
4204 {
4205 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4206 table = InsertClassTableForClassLoader(class_loader);
4207 }
Brian Carlstrom47d237a2011-10-18 15:08:33 -07004208 // Don't alloc while holding the lock, since allocation may need to
4209 // suspend all threads and another thread may need the dex_lock_ to
4210 // get to a suspend point.
Vladimir Markocd556b02017-02-03 11:47:34 +00004211 StackHandleScope<3> hs(self);
4212 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004213 ObjPtr<mirror::String> location;
Mathieu Chartier6c60d842016-09-15 10:24:43 -07004214 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(/*out*/&location,
4215 self,
4216 dex_file)));
4217 Handle<mirror::String> h_location(hs.NewHandle(location));
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004218 {
Ivan Maidanski2b69b9c2018-05-14 13:50:48 +03004219 // Avoid a deadlock between a garbage collecting thread running a checkpoint,
4220 // a thread holding the dex lock and blocking on a condition variable regarding
4221 // weak references access, and a thread blocking on the dex lock.
Ivan Maidanski2b69b9c2018-05-14 13:50:48 +03004222 gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
Andreas Gampecc1b5352016-12-01 16:58:38 -08004223 WriterMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004224 const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
4225 old_dex_cache = DecodeDexCacheLocked(self, old_data);
Andreas Gampefa4333d2017-02-14 11:10:34 -08004226 if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
Vladimir Markocd556b02017-02-03 11:47:34 +00004227 // Do InitializeDexCache while holding dex lock to make sure two threads don't call it at the
4228 // same time with the same dex cache. Since the .bss is shared this can cause failing DCHECK
4229 // that the arrays are null.
4230 mirror::DexCache::InitializeDexCache(self,
4231 h_dex_cache.Get(),
4232 h_location.Get(),
4233 &dex_file,
4234 linear_alloc,
4235 image_pointer_size_);
4236 RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004237 }
Alex Light725da8f2020-02-19 14:46:33 -08004238 if (old_dex_cache != nullptr) {
4239 // Another thread managed to initialize the dex cache faster, so use that DexCache.
4240 // If this thread encountered OOME, ignore it.
4241 DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
4242 self->ClearException();
4243 // We cannot call EnsureSameClassLoader() or allocate an exception while holding the
4244 // dex_lock_.
4245 if (IsSameClassLoader(old_dex_cache, old_data, h_class_loader.Get())) {
4246 return old_dex_cache;
4247 } else {
4248 registered_with_another_class_loader = true;
4249 }
4250 }
Vladimir Markocd556b02017-02-03 11:47:34 +00004251 }
Alex Light725da8f2020-02-19 14:46:33 -08004252 if (registered_with_another_class_loader) {
4253 ThrowDexFileAlreadyRegisteredError(self, dex_file);
4254 return nullptr;
Vladimir Markocd556b02017-02-03 11:47:34 +00004255 }
Andreas Gampefa4333d2017-02-14 11:10:34 -08004256 if (h_dex_cache == nullptr) {
Vladimir Markocd556b02017-02-03 11:47:34 +00004257 self->AssertPendingOOMException();
4258 return nullptr;
Brian Carlstrom47d237a2011-10-18 15:08:33 -07004259 }
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004260 table->InsertStrongRoot(h_dex_cache.Get());
Mathieu Chartiera1467d02017-02-22 09:22:50 -08004261 if (h_class_loader.Get() != nullptr) {
4262 // Since we added a strong root to the class table, do the write barrier as required for
4263 // remembered sets and generational GCs.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004264 WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
Mathieu Chartiera1467d02017-02-22 09:22:50 -08004265 }
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004266 return h_dex_cache.Get();
Brian Carlstromaded5f72011-10-07 17:15:04 -07004267}
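// Editor's sketch of the concurrency pattern in RegisterDexFile above: look up
// under the lock, drop the lock to allocate (allocation may need to suspend
// threads), then re-check under the lock before publishing so the entry that
// won the race is the one everybody uses. Registry and Entry are hypothetical
// stand-ins, not ART types.
#include <map>
#include <memory>
#include <mutex>
#include <string>

struct Entry { std::string location; };

class Registry {
 public:
  // Returns the registered entry for `location`, creating it if needed.
  std::shared_ptr<Entry> Register(const std::string& location) {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      auto it = entries_.find(location);
      if (it != entries_.end()) {
        return it->second;  // Fast path: already registered.
      }
    }
    // Allocate outside the lock ("don't alloc while holding the lock").
    auto fresh = std::make_shared<Entry>(Entry{location});
    std::lock_guard<std::mutex> lock(mutex_);
    auto it = entries_.find(location);
    if (it != entries_.end()) {
      return it->second;  // Another thread registered first; use its entry.
    }
    entries_.emplace(location, fresh);
    return fresh;
  }

 private:
  std::mutex mutex_;
  std::map<std::string, std::shared_ptr<Entry>> entries_;
};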
4268
Vladimir Markocd556b02017-02-03 11:47:34 +00004269bool ClassLinker::IsDexFileRegistered(Thread* self, const DexFile& dex_file) {
Andreas Gampecc1b5352016-12-01 16:58:38 -08004270 ReaderMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004271 return DecodeDexCacheLocked(self, FindDexCacheDataLocked(dex_file)) != nullptr;
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004272}
4273
Vladimir Markocd556b02017-02-03 11:47:34 +00004274ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const DexFile& dex_file) {
4275 ReaderMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004276 const DexCacheData* dex_cache_data = FindDexCacheDataLocked(dex_file);
4277 ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
Vladimir Markocd556b02017-02-03 11:47:34 +00004278 if (dex_cache != nullptr) {
4279 return dex_cache;
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07004280 }
Brian Carlstrom81a90872015-08-28 09:07:14 -07004281 // Failure, dump diagnostic and abort.
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08004282 for (const DexCacheData& data : dex_caches_) {
Alex Light725da8f2020-02-19 14:46:33 -08004283 if (DecodeDexCacheLocked(self, &data) != nullptr) {
Andreas Gampe37c58462017-03-27 15:14:27 -07004284 LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << data.dex_file->GetLocation();
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004285 }
Brian Carlstrom81a90872015-08-28 09:07:14 -07004286 }
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004287 LOG(FATAL) << "Failed to find DexCache for DexFile " << dex_file.GetLocation()
Alex Light725da8f2020-02-19 14:46:33 -08004288 << " " << &dex_file << " " << dex_cache_data->dex_file;
Ian Rogerse0a02da2014-12-02 14:10:53 -08004289 UNREACHABLE();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004290}
4291
Vladimir Markocd556b02017-02-03 11:47:34 +00004292ClassTable* ClassLinker::FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache) {
4293 const DexFile* dex_file = dex_cache->GetDexFile();
4294 DCHECK(dex_file != nullptr);
4295 ReaderMutexLock mu(self, *Locks::dex_lock_);
4296  // Search assuming uniqueness of the dex file.
4297 for (const DexCacheData& data : dex_caches_) {
4298 // Avoid decoding (and read barriers) other unrelated dex caches.
4299 if (data.dex_file == dex_file) {
Alex Light725da8f2020-02-19 14:46:33 -08004300 ObjPtr<mirror::DexCache> registered_dex_cache = DecodeDexCacheLocked(self, &data);
Vladimir Markocd556b02017-02-03 11:47:34 +00004301 if (registered_dex_cache != nullptr) {
4302 CHECK_EQ(registered_dex_cache, dex_cache) << dex_file->GetLocation();
4303 return data.class_table;
4304 }
4305 }
4306 }
4307 return nullptr;
4308}
4309
Alex Light725da8f2020-02-19 14:46:33 -08004310const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(const DexFile& dex_file) {
Vladimir Markocd556b02017-02-03 11:47:34 +00004311  // Search assuming uniqueness of the dex file.
4312 for (const DexCacheData& data : dex_caches_) {
4313 // Avoid decoding (and read barriers) other unrelated dex caches.
4314 if (data.dex_file == &dex_file) {
Alex Light725da8f2020-02-19 14:46:33 -08004315 return &data;
Vladimir Markocd556b02017-02-03 11:47:34 +00004316 }
4317 }
Alex Light725da8f2020-02-19 14:46:33 -08004318 return nullptr;
Vladimir Markocd556b02017-02-03 11:47:34 +00004319}
4320
Vladimir Marko70e2a762019-07-12 16:49:00 +01004321void ClassLinker::CreatePrimitiveClass(Thread* self,
4322 Primitive::Type type,
4323 ClassRoot primitive_root) {
Vladimir Markoacb906d2018-05-30 10:23:49 +01004324 ObjPtr<mirror::Class> primitive_class =
Mathieu Chartier6beced42016-11-15 15:51:31 -08004325 AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
Vladimir Marko70e2a762019-07-12 16:49:00 +01004326 CHECK(primitive_class != nullptr) << "OOM for primitive class " << type;
4327  // Do not hold the lock on the primitive class object; the initialization of
4328 // primitive classes is done while the process is still single threaded.
Vladimir Markob68bb7a2020-03-17 10:55:25 +00004329 primitive_class->SetAccessFlagsDuringLinking(
Vladimir Marko70e2a762019-07-12 16:49:00 +01004330 kAccPublic | kAccFinal | kAccAbstract | kAccVerificationAttempted);
4331 primitive_class->SetPrimitiveType(type);
4332 primitive_class->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
4333 // Skip EnsureSkipAccessChecksMethods(). We can skip the verified status,
4334 // the kAccVerificationAttempted flag was added above, and there are no
4335 // methods that need the kAccSkipAccessChecks flag.
4336 DCHECK_EQ(primitive_class->NumMethods(), 0u);
Vladimir Markobf121912019-06-04 13:49:05 +01004337 // Primitive classes are initialized during single threaded startup, so visibly initialized.
4338 primitive_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004339 const char* descriptor = Primitive::Descriptor(type);
Mathieu Chartier6beced42016-11-15 15:51:31 -08004340 ObjPtr<mirror::Class> existing = InsertClass(descriptor,
Vladimir Marko70e2a762019-07-12 16:49:00 +01004341 primitive_class,
Mathieu Chartier6beced42016-11-15 15:51:31 -08004342 ComputeModifiedUtf8Hash(descriptor));
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004343 CHECK(existing == nullptr) << "InitPrimitiveClass(" << type << ") failed";
Vladimir Marko70e2a762019-07-12 16:49:00 +01004344 SetClassRoot(primitive_root, primitive_class);
Carl Shapiro565f5072011-07-10 13:39:43 -07004345}
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004346
Vladimir Marko02610552018-06-04 14:38:00 +01004347inline ObjPtr<mirror::IfTable> ClassLinker::GetArrayIfTable() {
4348 return GetClassRoot<mirror::ObjectArray<mirror::Object>>(this)->GetIfTable();
4349}
4350
Brian Carlstrombe977852011-07-19 14:54:54 -07004351// Create an array class (i.e. the class object for the array, not the
4352// array itself). "descriptor" looks like "[C" or "[[[[B" or
4353// "[Ljava/lang/String;".
4354//
4355// If "descriptor" refers to an array of primitives, look up the
4356// primitive type's internally-generated class object.
4357//
Brian Carlstrom5b8e4c82011-09-18 01:38:59 -07004358// "class_loader" is the class loader of the class that's referring to
4359// us. It's used to ensure that we're looking for the element type in
4360// the right context. It does NOT become the class loader for the
4361// array class; that always comes from the base element class.
Brian Carlstrombe977852011-07-19 14:54:54 -07004362//
Mathieu Chartier2cebb242015-04-21 16:50:40 -07004363// Returns null with an exception raised on failure.
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004364ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
4365 const char* descriptor,
4366 size_t hash,
4367 Handle<mirror::ClassLoader> class_loader) {
Brian Carlstrom5b8e4c82011-09-18 01:38:59 -07004368 // Identify the underlying component type
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004369 CHECK_EQ('[', descriptor[0]);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004370 StackHandleScope<2> hs(self);
Alex Lighte9f61032018-09-24 16:04:51 -07004371
4372 // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
4373 // code to be executed. We put it up here so we can avoid all the allocations associated with
4374  // creating the class. This can happen with (e.g.) JIT threads.
4375 if (!self->CanLoadClasses()) {
4376 // Make sure we don't try to load anything, potentially causing an infinite loop.
4377 ObjPtr<mirror::Throwable> pre_allocated =
4378 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4379 self->SetException(pre_allocated);
4380 return nullptr;
4381 }
4382
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07004383 MutableHandle<mirror::Class> component_type(hs.NewHandle(FindClass(self, descriptor + 1,
4384 class_loader)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004385 if (component_type == nullptr) {
Mathieu Chartierc0a9ea42014-02-03 16:36:49 -08004386 DCHECK(self->IsExceptionPending());
Andreas Gampedc13d7d2014-07-23 20:18:36 -07004387 // We need to accept erroneous classes as component types.
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08004388 const size_t component_hash = ComputeModifiedUtf8Hash(descriptor + 1);
4389 component_type.Assign(LookupClass(self, descriptor + 1, component_hash, class_loader.Get()));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004390 if (component_type == nullptr) {
Andreas Gampedc13d7d2014-07-23 20:18:36 -07004391 DCHECK(self->IsExceptionPending());
4392 return nullptr;
4393 } else {
4394 self->ClearException();
4395 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004396 }
Ian Rogers2d10b202014-05-12 19:15:18 -07004397 if (UNLIKELY(component_type->IsPrimitiveVoid())) {
4398 ThrowNoClassDefFoundError("Attempt to create array of void primitive type");
4399 return nullptr;
4400 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004401 // See if the component type is already loaded. Array classes are
4402 // always associated with the class loader of their underlying
4403 // element type -- an array of Strings goes with the loader for
4404 // java/lang/String -- so we need to look for it there. (The
4405 // caller should have checked for the existence of the class
4406 // before calling here, but they did so with *their* class loader,
4407 // not the component type's loader.)
4408 //
4409 // If we find it, the caller adds "loader" to the class' initiating
4410 // loader list, which should prevent us from going through this again.
4411 //
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07004412 // This call is unnecessary if "loader" and "component_type->GetClassLoader()"
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004413 // are the same, because our caller (FindClass) just did the
4414 // lookup. (Even if we get this wrong we still have correct behavior,
4415 // because we effectively do this lookup again when we add the new
4416 // class to the hash table --- necessary because of possible races with
4417 // other threads.)
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004418 if (class_loader.Get() != component_type->GetClassLoader()) {
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00004419 ObjPtr<mirror::Class> new_class =
4420 LookupClass(self, descriptor, hash, component_type->GetClassLoader());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004421 if (new_class != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01004422 return new_class;
Brian Carlstroma331b3c2011-07-18 17:47:56 -07004423 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004424 }
Vladimir Marko70e2a762019-07-12 16:49:00 +01004425 // Core array classes, i.e. Object[], Class[], String[] and primitive
4426 // arrays, have special initialization and they should be found above.
4427 DCHECK(!component_type->IsObjectClass() ||
4428 // Guard from false positives for errors before setting superclass.
4429 component_type->IsErroneousUnresolved());
4430 DCHECK(!component_type->IsStringClass());
4431 DCHECK(!component_type->IsClassClass());
4432 DCHECK(!component_type->IsPrimitive());
Brian Carlstroma331b3c2011-07-18 17:47:56 -07004433
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004434 // Fill out the fields in the Class.
4435 //
4436 // It is possible to execute some methods against arrays, because
4437 // all arrays are subclasses of java_lang_Object_, so we need to set
4438 // up a vtable. We can just point at the one in java_lang_Object_.
4439 //
4440 // Array classes are simple enough that we don't need to do a full
4441 // link step.
Vladimir Marko70e2a762019-07-12 16:49:00 +01004442 size_t array_class_size = mirror::Array::ClassSize(image_pointer_size_);
4443 auto visitor = [this, array_class_size, component_type](ObjPtr<mirror::Object> obj,
4444 size_t usable_size)
4445 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markob68bb7a2020-03-17 10:55:25 +00004446 ScopedAssertNoNewTransactionRecords sanntr("CreateArrayClass");
Vladimir Marko70e2a762019-07-12 16:49:00 +01004447 mirror::Class::InitializeClassVisitor init_class(array_class_size);
4448 init_class(obj, usable_size);
4449 ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
4450 klass->SetComponentType(component_type.Get());
4451    // Do not hold the lock for initialization; the fence issued after the visitor
4452 // returns ensures memory visibility together with the implicit consume
4453 // semantics (for all supported architectures) for any thread that loads
4454 // the array class reference from any memory locations afterwards.
4455 FinishArrayClassSetup(klass);
4456 };
4457 auto new_class = hs.NewHandle<mirror::Class>(
4458 AllocClass(self, GetClassRoot<mirror::Class>(this), array_class_size, visitor));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004459 if (new_class == nullptr) {
Vladimir Marko70e2a762019-07-12 16:49:00 +01004460 self->AssertPendingOOMException();
4461 return nullptr;
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004462 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004463
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004464 ObjPtr<mirror::Class> existing = InsertClass(descriptor, new_class.Get(), hash);
Mathieu Chartierc0a9ea42014-02-03 16:36:49 -08004465 if (existing == nullptr) {
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004466 // We postpone ClassLoad and ClassPrepare events to this point in time to avoid
4467 // duplicate events in case of races. Array classes don't really follow dedicated
4468  // load and prepare, anyway.
4469 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(new_class);
4470 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(new_class, new_class);
4471
Tamas Berghammer160e6df2016-01-05 14:29:02 +00004472 jit::Jit::NewTypeLoadedIfUsingJit(new_class.Get());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004473 return new_class.Get();
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004474 }
4475 // Another thread must have loaded the class after we
4476 // started but before we finished. Abandon what we've
4477 // done.
4478 //
4479 // (Yes, this happens.)
4480
Vladimir Markobcf17522018-06-01 13:14:32 +01004481 return existing;
Brian Carlstroma331b3c2011-07-18 17:47:56 -07004482}
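// Editor's note: an array descriptor is simply '[' followed by the component
// descriptor, and the array class is always defined by the component type's
// class loader, as CreateArrayClass above documents. ComponentDescriptor is a
// hypothetical helper shown only to make the descriptor convention concrete.
#include <cassert>
#include <string>

inline std::string ComponentDescriptor(const std::string& array_descriptor) {
  assert(!array_descriptor.empty() && array_descriptor[0] == '[');
  // "[[I" -> "[I", "[Ljava/lang/String;" -> "Ljava/lang/String;"
  return array_descriptor.substr(1);
}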
4483
Vladimir Marko9186b182018-11-06 14:55:54 +00004484ObjPtr<mirror::Class> ClassLinker::LookupPrimitiveClass(char type) {
4485 ClassRoot class_root;
Ian Rogers62f05122014-03-21 11:21:29 -07004486 switch (type) {
Vladimir Marko9186b182018-11-06 14:55:54 +00004487 case 'B': class_root = ClassRoot::kPrimitiveByte; break;
4488 case 'C': class_root = ClassRoot::kPrimitiveChar; break;
4489 case 'D': class_root = ClassRoot::kPrimitiveDouble; break;
4490 case 'F': class_root = ClassRoot::kPrimitiveFloat; break;
4491 case 'I': class_root = ClassRoot::kPrimitiveInt; break;
4492 case 'J': class_root = ClassRoot::kPrimitiveLong; break;
4493 case 'S': class_root = ClassRoot::kPrimitiveShort; break;
4494 case 'Z': class_root = ClassRoot::kPrimitiveBoolean; break;
4495 case 'V': class_root = ClassRoot::kPrimitiveVoid; break;
Ian Rogers62f05122014-03-21 11:21:29 -07004496 default:
Vladimir Marko9186b182018-11-06 14:55:54 +00004497 return nullptr;
Carl Shapiro744ad052011-08-06 15:53:36 -07004498 }
Vladimir Marko9186b182018-11-06 14:55:54 +00004499 return GetClassRoot(class_root, this);
4500}
4501
4502ObjPtr<mirror::Class> ClassLinker::FindPrimitiveClass(char type) {
4503 ObjPtr<mirror::Class> result = LookupPrimitiveClass(type);
4504 if (UNLIKELY(result == nullptr)) {
4505 std::string printable_type(PrintableChar(type));
4506 ThrowNoClassDefFoundError("Not a primitive type: %s", printable_type.c_str());
4507 }
4508 return result;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004509}
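// Editor's sketch of the primitive descriptor mapping used by
// LookupPrimitiveClass/FindPrimitiveClass above; it returns the primitive type
// name for a descriptor character, or nullptr for anything else (where
// FindPrimitiveClass throws NoClassDefFoundError). The helper is illustrative
// only, not part of ClassLinker.
inline const char* PrimitiveNameForDescriptor(char type) {
  switch (type) {
    case 'B': return "byte";
    case 'C': return "char";
    case 'D': return "double";
    case 'F': return "float";
    case 'I': return "int";
    case 'J': return "long";
    case 'S': return "short";
    case 'Z': return "boolean";
    case 'V': return "void";
    default:  return nullptr;  // Not a primitive descriptor.
  }
}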
4510
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004511ObjPtr<mirror::Class> ClassLinker::InsertClass(const char* descriptor,
4512 ObjPtr<mirror::Class> klass,
4513 size_t hash) {
Alex Lighte9f61032018-09-24 16:04:51 -07004514 DCHECK(Thread::Current()->CanLoadClasses());
Elliott Hughes4dd9b4d2011-12-12 18:29:24 -08004515 if (VLOG_IS_ON(class_linker)) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004516 ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
Brian Carlstromae826982011-11-09 01:33:42 -08004517 std::string source;
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004518 if (dex_cache != nullptr) {
Brian Carlstromae826982011-11-09 01:33:42 -08004519 source += " from ";
4520 source += dex_cache->GetLocation()->ToModifiedUtf8();
4521 }
4522 LOG(INFO) << "Loaded class " << descriptor << source;
4523 }
Mathieu Chartier65975772016-08-05 10:46:36 -07004524 {
4525 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Vladimir Marko0984e482019-03-27 16:41:41 +00004526 const ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
Mathieu Chartier65975772016-08-05 10:46:36 -07004527 ClassTable* const class_table = InsertClassTableForClassLoader(class_loader);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004528 ObjPtr<mirror::Class> existing = class_table->Lookup(descriptor, hash);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004529 if (existing != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01004530 return existing;
Mathieu Chartier65975772016-08-05 10:46:36 -07004531 }
Mathieu Chartier65975772016-08-05 10:46:36 -07004532 VerifyObject(klass);
4533 class_table->InsertWithHash(klass, hash);
4534 if (class_loader != nullptr) {
4535 // This is necessary because we need to have the card dirtied for remembered sets.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004536 WriteBarrier::ForEveryFieldWrite(class_loader);
Mathieu Chartier65975772016-08-05 10:46:36 -07004537 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004538 if (log_new_roots_) {
Mathieu Chartier65975772016-08-05 10:46:36 -07004539 new_class_roots_.push_back(GcRoot<mirror::Class>(klass));
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004540 }
4541 }
Mathieu Chartier65975772016-08-05 10:46:36 -07004542 if (kIsDebugBuild) {
4543    // Test that copied methods can correctly find their holder.
4544 for (ArtMethod& method : klass->GetCopiedMethods(image_pointer_size_)) {
4545 CHECK_EQ(GetHoldingClassOfCopiedMethod(&method), klass);
4546 }
Mathieu Chartier893263b2014-03-04 11:07:42 -08004547 }
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004548 return nullptr;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004549}
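// Editor's sketch of the insert-or-return-existing contract of InsertClass
// above: insertion returns null on success and the previously registered value
// when another thread won the race, so the caller can discard its own copy.
// ClassTableSketch is a hypothetical simplification of ART's ClassTable.
#include <mutex>
#include <string>
#include <unordered_map>

class ClassTableSketch {
 public:
  // Returns nullptr if `value` was inserted, or the existing value otherwise.
  const std::string* InsertOrLookup(const std::string& descriptor,
                                    const std::string* value) {
    std::lock_guard<std::mutex> lock(mutex_);
    auto result = table_.emplace(descriptor, value);
    return result.second ? nullptr : result.first->second;
  }

 private:
  std::mutex mutex_;
  std::unordered_map<std::string, const std::string*> table_;
};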
4550
Vladimir Marko1998cd02017-01-13 13:02:58 +00004551void ClassLinker::WriteBarrierForBootOatFileBssRoots(const OatFile* oat_file) {
Mathieu Chartiera1467d02017-02-22 09:22:50 -08004552 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4553 DCHECK(!oat_file->GetBssGcRoots().empty()) << oat_file->GetLocation();
4554 if (log_new_roots_ && !ContainsElement(new_bss_roots_boot_oat_files_, oat_file)) {
4555 new_bss_roots_boot_oat_files_.push_back(oat_file);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004556 }
4557}
4558
Alex Lighte64300b2015-12-15 15:02:47 -08004559// TODO This should really be in mirror::Class.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004560void ClassLinker::UpdateClassMethods(ObjPtr<mirror::Class> klass,
Alex Lighte64300b2015-12-15 15:02:47 -08004561 LengthPrefixedArray<ArtMethod>* new_methods) {
4562 klass->SetMethodsPtrUnchecked(new_methods,
4563 klass->NumDirectMethods(),
4564 klass->NumDeclaredVirtualMethods());
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004565 // Need to mark the card so that the remembered sets and mod union tables get updated.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004566 WriteBarrier::ForEveryFieldWrite(klass);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004567}
4568
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004569ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4570 const char* descriptor,
4571 ObjPtr<mirror::ClassLoader> class_loader) {
Andreas Gampe2ff3b972017-06-05 18:14:53 -07004572 return LookupClass(self, descriptor, ComputeModifiedUtf8Hash(descriptor), class_loader);
4573}
4574
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004575ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4576 const char* descriptor,
4577 size_t hash,
4578 ObjPtr<mirror::ClassLoader> class_loader) {
Vladimir Marko1a1de672016-10-13 12:53:15 +01004579 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4580 ClassTable* const class_table = ClassTableForClassLoader(class_loader);
4581 if (class_table != nullptr) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004582 ObjPtr<mirror::Class> result = class_table->Lookup(descriptor, hash);
Vladimir Marko1a1de672016-10-13 12:53:15 +01004583 if (result != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01004584 return result;
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004585 }
Sameer Abu Asal2c6de222013-05-02 17:38:59 -07004586 }
Vladimir Marko1a1de672016-10-13 12:53:15 +01004587 return nullptr;
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004588}
4589
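// Visitor used by MoveClassTableToPreZygote below. Freezing a ClassTable snapshot starts a new
// internal set for future insertions, so classes resolved before the zygote fork end up in a
// portion of each table that is no longer written to; the intent (an assumption based on
// ClassTable::FreezeSnapshot and its use around the zygote fork) is to keep those pre-fork
// entries stable and cheap to share across forked processes.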
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004590class MoveClassTableToPreZygoteVisitor : public ClassLoaderVisitor {
4591 public:
Igor Murashkin2ffb7032017-11-08 13:35:21 -08004592 MoveClassTableToPreZygoteVisitor() {}
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004593
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004594 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004595 REQUIRES(Locks::classlinker_classes_lock_)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01004596 REQUIRES_SHARED(Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004597 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier6b069532015-08-05 15:08:12 -07004598 if (class_table != nullptr) {
4599 class_table->FreezeSnapshot();
4600 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07004601 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004602};
4603
4604void ClassLinker::MoveClassTableToPreZygote() {
4605 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Andreas Gampe2af99022017-04-25 08:32:59 -07004606 boot_class_table_->FreezeSnapshot();
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004607 MoveClassTableToPreZygoteVisitor visitor;
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07004608 VisitClassLoaders(&visitor);
Mathieu Chartierc2e20622014-11-03 11:41:47 -08004609}
4610
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004611// Look up classes by hash and descriptor and put all matching ones in the result array.
4612class LookupClassesVisitor : public ClassLoaderVisitor {
4613 public:
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004614 LookupClassesVisitor(const char* descriptor,
4615 size_t hash,
4616 std::vector<ObjPtr<mirror::Class>>* result)
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004617 : descriptor_(descriptor),
4618 hash_(hash),
4619 result_(result) {}
4620
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004621 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01004622 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004623 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004624 ObjPtr<mirror::Class> klass = class_table->Lookup(descriptor_, hash_);
Vladimir Markoc5798bf2016-12-09 10:20:54 +00004625 // Add `klass` only if `class_loader` is its defining (not just initiating) class loader.
4626 if (klass != nullptr && klass->GetClassLoader() == class_loader) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004627 result_->push_back(klass);
4628 }
4629 }
4630
4631 private:
4632 const char* const descriptor_;
4633 const size_t hash_;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004634 std::vector<ObjPtr<mirror::Class>>* const result_;
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004635};
4636
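// Collects every currently registered class with the given descriptor, starting with the boot
// class table and then every registered class loader. Only the defining loader of each class
// contributes an entry, so no class is reported twice. A minimal (hypothetical) use:
//   std::vector<ObjPtr<mirror::Class>> classes;
//   class_linker->LookupClasses("Ljava/lang/Runnable;", classes);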
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004637void ClassLinker::LookupClasses(const char* descriptor,
4638 std::vector<ObjPtr<mirror::Class>>& result) {
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004639 result.clear();
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004640 Thread* const self = Thread::Current();
4641 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004642 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
Andreas Gampe2af99022017-04-25 08:32:59 -07004643 ObjPtr<mirror::Class> klass = boot_class_table_->Lookup(descriptor, hash);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004644 if (klass != nullptr) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00004645 DCHECK(klass->GetClassLoader() == nullptr);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004646 result.push_back(klass);
4647 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004648 LookupClassesVisitor visitor(descriptor, hash, &result);
4649 VisitClassLoaders(&visitor);
Elliott Hughes6fa602d2011-12-02 17:54:25 -08004650}
4651
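// Verifies one supertype (superclass or superinterface) on behalf of `klass`. Returns true when
// the supertype is verified or only soft-failed at AOT time; otherwise `klass` is marked
// erroneous and a VerifyError is thrown whose cause is the supertype's own verification failure.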
Alex Lightf1f10492015-10-07 16:08:36 -07004652bool ClassLinker::AttemptSupertypeVerification(Thread* self,
4653 Handle<mirror::Class> klass,
4654 Handle<mirror::Class> supertype) {
4655 DCHECK(self != nullptr);
Andreas Gampefa4333d2017-02-14 11:10:34 -08004656 DCHECK(klass != nullptr);
4657 DCHECK(supertype != nullptr);
Alex Lightf1f10492015-10-07 16:08:36 -07004658
Alex Lightf1f10492015-10-07 16:08:36 -07004659 if (!supertype->IsVerified() && !supertype->IsErroneous()) {
4660 VerifyClass(self, supertype);
4661 }
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004662
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004663 if (supertype->IsVerified()
4664 || supertype->ShouldVerifyAtRuntime()
4665 || supertype->IsVerifiedNeedsAccessChecks()) {
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004666 // The supertype is either verified, or we soft failed at AOT time.
4667 DCHECK(supertype->IsVerified() || Runtime::Current()->IsAotCompiler());
Alex Lightf1f10492015-10-07 16:08:36 -07004668 return true;
4669 }
4670 // If we got this far then we have a hard failure.
4671 std::string error_msg =
4672 StringPrintf("Rejecting class %s that attempts to sub-type erroneous class %s",
David Sehr709b0702016-10-13 09:12:37 -07004673 klass->PrettyDescriptor().c_str(),
4674 supertype->PrettyDescriptor().c_str());
Alex Lightf1f10492015-10-07 16:08:36 -07004675 LOG(WARNING) << error_msg << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
Andreas Gampe884f3b82016-03-30 19:52:58 -07004676 StackHandleScope<1> hs(self);
Alex Lightf1f10492015-10-07 16:08:36 -07004677 Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004678 if (cause != nullptr) {
Alex Lightf1f10492015-10-07 16:08:36 -07004679 // Set during VerifyClass call (if at all).
4680 self->ClearException();
4681 }
4682 // Change into a verify error.
4683 ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
Andreas Gampefa4333d2017-02-14 11:10:34 -08004684 if (cause != nullptr) {
Alex Lightf1f10492015-10-07 16:08:36 -07004685 self->GetException()->SetCause(cause.Get());
4686 }
4687 ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
4688 if (Runtime::Current()->IsAotCompiler()) {
4689 Runtime::Current()->GetCompilerCallbacks()->ClassRejected(ref);
4690 }
Andreas Gampe884f3b82016-03-30 19:52:58 -07004691 // Need to grab the lock to change status.
4692 ObjectLock<mirror::Class> super_lock(self, klass);
Vladimir Marko2c64a832018-01-04 11:31:56 +00004693 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Alex Lightf1f10492015-10-07 16:08:36 -07004694 return false;
4695}
4696
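// Drives verification of a single class: waits out any concurrent verification attempt, reuses
// the status recorded in the oat file when it can be trusted, verifies the superclass and any
// default-method interfaces first, and finally publishes the resulting ClassStatus (kVerified,
// kVerifiedNeedsAccessChecks, kRetryVerificationAtRuntime or an error status) on the class.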
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004697verifier::FailureKind ClassLinker::VerifyClass(
Nicolas Geoffray08025182016-10-25 17:20:18 +01004698 Thread* self, Handle<mirror::Class> klass, verifier::HardFailLogMode log_level) {
Andreas Gampe884f3b82016-03-30 19:52:58 -07004699 {
4700 // TODO: assert that the monitor on the Class is held
4701 ObjectLock<mirror::Class> lock(self, klass);
Elliott Hughesd9c67be2012-02-02 19:54:06 -08004702
Andreas Gampe884f3b82016-03-30 19:52:58 -07004703 // Is somebody verifying this now?
Vladimir Marko2c64a832018-01-04 11:31:56 +00004704 ClassStatus old_status = klass->GetStatus();
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004705 while (old_status == ClassStatus::kVerifying) {
Andreas Gampe884f3b82016-03-30 19:52:58 -07004706 lock.WaitIgnoringInterrupts();
Mathieu Chartier5ef70202017-06-29 10:45:10 -07004707 // WaitIgnoringInterrupts can still receive an interrupt and return early; in this
4708 // case we may see the same status again. b/62912904. This is why the check is
4709 // greater or equal.
4710 CHECK(klass->IsErroneous() || (klass->GetStatus() >= old_status))
David Sehr709b0702016-10-13 09:12:37 -07004711 << "Class '" << klass->PrettyClass()
4712 << "' performed an illegal verification state transition from " << old_status
4713 << " to " << klass->GetStatus();
Andreas Gampe884f3b82016-03-30 19:52:58 -07004714 old_status = klass->GetStatus();
4715 }
jeffhao98eacac2011-09-14 16:11:53 -07004716
Andreas Gampe884f3b82016-03-30 19:52:58 -07004717 // The class might already be erroneous, for example at compile time if we attempted to verify
4718 // this class as a parent to another.
4719 if (klass->IsErroneous()) {
4720 ThrowEarlierClassFailure(klass.Get());
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004721 return verifier::FailureKind::kHardFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004722 }
Brian Carlstrom9b5ee882012-02-28 09:48:54 -08004723
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004724 // Don't attempt to re-verify if already verified.
Andreas Gampe884f3b82016-03-30 19:52:58 -07004725 if (klass->IsVerified()) {
Andreas Gampecc1b5352016-12-01 16:58:38 -08004726 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004727 return verifier::FailureKind::kNoFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004728 }
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004729
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004730 if (klass->IsVerifiedNeedsAccessChecks()) {
4731 if (!Runtime::Current()->IsAotCompiler()) {
4732 // Mark the class as having a verification attempt to avoid re-running
4733 // the verifier and avoid calling EnsureSkipAccessChecksMethods.
4734 klass->SetVerificationAttempted();
4735 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4736 }
4737 return verifier::FailureKind::kAccessChecksFailure;
4738 }
4739
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004740 // For AOT, don't attempt to re-verify if we have already found we should
4741 // verify at runtime.
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004742 if (klass->ShouldVerifyAtRuntime()) {
4743 CHECK(Runtime::Current()->IsAotCompiler());
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004744 return verifier::FailureKind::kSoftFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004745 }
jeffhao98eacac2011-09-14 16:11:53 -07004746
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004747 DCHECK_EQ(klass->GetStatus(), ClassStatus::kResolved);
4748 mirror::Class::SetStatus(klass, ClassStatus::kVerifying, self);
Andreas Gampe884f3b82016-03-30 19:52:58 -07004749
4750 // Skip verification if disabled.
4751 if (!Runtime::Current()->IsVerificationEnabled()) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00004752 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
Andreas Gampecc1b5352016-12-01 16:58:38 -08004753 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004754 return verifier::FailureKind::kNoFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004755 }
Jeff Hao4a200f52014-04-01 14:58:49 -07004756 }
4757
Bharadwaj Kalandhabhatta271c1e12017-06-27 11:14:49 -07004758 VLOG(class_linker) << "Beginning verification for class: "
4759 << klass->PrettyDescriptor()
4760 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
4761
Ian Rogers9ffb0392012-09-10 11:56:50 -07004762 // Verify super class.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004763 StackHandleScope<2> hs(self);
Alex Lightf1f10492015-10-07 16:08:36 -07004764 MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
4765 // If we have a superclass and we get a hard verification failure we can return immediately.
Andreas Gampefa4333d2017-02-14 11:10:34 -08004766 if (supertype != nullptr && !AttemptSupertypeVerification(self, klass, supertype)) {
Alex Lightf1f10492015-10-07 16:08:36 -07004767 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004768 return verifier::FailureKind::kHardFailure;
Alex Lightf1f10492015-10-07 16:08:36 -07004769 }
Ian Rogers1c5eb702012-02-01 09:18:34 -08004770
Alex Lightf1f10492015-10-07 16:08:36 -07004771 // Verify all default super-interfaces.
4772 //
4773 // (1) Don't bother if the superclass has already had a soft verification failure.
4774 //
4775 // (2) Interfaces shouldn't bother to do this recursive verification because they cannot cause
4776 // recursive initialization by themselves. This is because when an interface is initialized
4777 // directly it must not initialize its superinterfaces. We are allowed to verify regardless
4778 // but choose not to for an optimization. If the interface is being verified due to a class
4779 // initialization (which would need all the default interfaces to be verified) the class code
4780 // will trigger the recursive verification anyway.
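  // Illustrative (hypothetical) example:
  //   interface Greeter { default String greet() { return "hi"; } }   // has default methods
  //   class EnglishGreeter implements Greeter { }
  // Verifying EnglishGreeter also attempts to verify Greeter here, since Greeter must be
  // initialized (and therefore verified) whenever EnglishGreeter is initialized.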
Andreas Gampefa4333d2017-02-14 11:10:34 -08004781 if ((supertype == nullptr || supertype->IsVerified()) // See (1)
Alex Lightf1f10492015-10-07 16:08:36 -07004782 && !klass->IsInterface()) { // See (2)
4783 int32_t iftable_count = klass->GetIfTableCount();
4784 MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
4785 // Loop through all interfaces this class has defined. It doesn't matter the order.
4786 for (int32_t i = 0; i < iftable_count; i++) {
4787 iface.Assign(klass->GetIfTable()->GetInterface(i));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004788 DCHECK(iface != nullptr);
Alex Lightf1f10492015-10-07 16:08:36 -07004789 // We only care if we have default interfaces and can skip if we are already verified...
4790 if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
4791 continue;
4792 } else if (UNLIKELY(!AttemptSupertypeVerification(self, klass, iface))) {
4793 // We had a hard failure while verifying this interface. Just return immediately.
4794 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004795 return verifier::FailureKind::kHardFailure;
Alex Lightf1f10492015-10-07 16:08:36 -07004796 } else if (UNLIKELY(!iface->IsVerified())) {
4797 // We softly failed to verify the iface. Stop checking and clean up.
4798 // Put the iface into the supertype handle so we know what caused us to fail.
4799 supertype.Assign(iface.Get());
4800 break;
Ian Rogers1c5eb702012-02-01 09:18:34 -08004801 }
Ian Rogers1c5eb702012-02-01 09:18:34 -08004802 }
4803 }
4804
Alex Lightf1f10492015-10-07 16:08:36 -07004805 // At this point if verification failed, then supertype is the "first" supertype that failed
4806 // verification (without a specific order). If verification succeeded, then supertype is either
4807 // null or the original superclass of klass and is verified.
Andreas Gampefa4333d2017-02-14 11:10:34 -08004808 DCHECK(supertype == nullptr ||
Alex Lightf1f10492015-10-07 16:08:36 -07004809 supertype.Get() == klass->GetSuperClass() ||
4810 !supertype->IsVerified());
4811
Elliott Hughes634eb2e2012-03-22 16:06:28 -07004812 // Try to use verification information from the oat file, otherwise do runtime verification.
Ian Rogers4445a7e2012-10-05 17:19:13 -07004813 const DexFile& dex_file = *klass->GetDexCache()->GetDexFile();
Vladimir Marko2c64a832018-01-04 11:31:56 +00004814 ClassStatus oat_file_class_status(ClassStatus::kNotReady);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004815 bool preverified = VerifyClassUsingOatFile(dex_file, klass.Get(), oat_file_class_status);
Bharadwaj Kalandhabhatta271c1e12017-06-27 11:14:49 -07004816
4817 VLOG(class_linker) << "Class preverified status for class "
4818 << klass->PrettyDescriptor()
4819 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4820 << ": "
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004821 << preverified
4822 << "( " << oat_file_class_status << ")";
Bharadwaj Kalandhabhatta271c1e12017-06-27 11:14:49 -07004823
Andreas Gampeec6e6c12015-11-05 20:39:56 -08004824 // If the oat file says the class had an error, re-run the verifier. That way we will get a
4825 // precise error message. To ensure a rerun, test:
Vladimir Marko72ab6842017-01-20 19:32:50 +00004826 // mirror::Class::IsErroneous(oat_file_class_status) => !preverified
4827 DCHECK(!mirror::Class::IsErroneous(oat_file_class_status) || !preverified);
Andreas Gampeec6e6c12015-11-05 20:39:56 -08004828
Ian Rogers62d6c772013-02-27 08:32:07 -08004829 std::string error_msg;
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004830 verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
jeffhaof1e6b7c2012-06-05 18:33:30 -07004831 if (!preverified) {
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004832 verifier_failure = PerformClassVerification(self, klass, log_level, &error_msg);
jeffhaof1e6b7c2012-06-05 18:33:30 -07004833 }
Andreas Gampe884f3b82016-03-30 19:52:58 -07004834
4835 // Verification is done, grab the lock again.
4836 ObjectLock<mirror::Class> lock(self, klass);
4837
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004838 if (preverified || verifier_failure != verifier::FailureKind::kHardFailure) {
4839 if (!preverified && verifier_failure != verifier::FailureKind::kNoFailure) {
David Sehr709b0702016-10-13 09:12:37 -07004840 VLOG(class_linker) << "Soft verification failure in class "
4841 << klass->PrettyDescriptor()
4842 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4843 << " because: " << error_msg;
Ian Rogers529781d2012-07-23 17:24:29 -07004844 }
Ian Rogers1f539342012-10-03 21:09:42 -07004845 self->AssertNoPendingException();
jeffhaoe4f0b2a2012-08-30 11:18:57 -07004846 // Make sure all classes referenced by catch blocks are resolved.
Alex Light5a559862016-01-29 12:24:48 -08004847 ResolveClassExceptionHandlerTypes(klass);
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004848 if (verifier_failure == verifier::FailureKind::kNoFailure) {
Alex Lightf1f10492015-10-07 16:08:36 -07004849 // Even though there were no verifier failures we need to respect whether the super-class and
4850 // super-default-interfaces were verified or require runtime reverification.
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004851 if (supertype == nullptr
4852 || supertype->IsVerified()
4853 || supertype->IsVerifiedNeedsAccessChecks()) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00004854 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07004855 } else {
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004856 CHECK(Runtime::Current()->IsAotCompiler());
Vladimir Marko2c64a832018-01-04 11:31:56 +00004857 CHECK_EQ(supertype->GetStatus(), ClassStatus::kRetryVerificationAtRuntime);
4858 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
Alex Lightf1f10492015-10-07 16:08:36 -07004859 // Pretend a soft failure occurred so that we don't consider the class verified below.
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004860 verifier_failure = verifier::FailureKind::kSoftFailure;
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07004861 }
jeffhaoe4f0b2a2012-08-30 11:18:57 -07004862 } else {
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004863 CHECK(verifier_failure == verifier::FailureKind::kSoftFailure ||
4864 verifier_failure == verifier::FailureKind::kAccessChecksFailure);
jeffhaoe4f0b2a2012-08-30 11:18:57 -07004865 // Soft failures at compile time should be retried at runtime. Soft
4866 // failures at runtime will be handled by slow paths in the generated
4867 // code. Set status accordingly.
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08004868 if (Runtime::Current()->IsAotCompiler()) {
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004869 if (verifier_failure == verifier::FailureKind::kSoftFailure) {
4870 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
4871 } else {
4872 mirror::Class::SetStatus(klass, ClassStatus::kVerifiedNeedsAccessChecks, self);
4873 }
jeffhaoe4f0b2a2012-08-30 11:18:57 -07004874 } else {
Vladimir Marko2c64a832018-01-04 11:31:56 +00004875 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
Igor Murashkindf707e42016-02-02 16:56:50 -08004876 // As this is a fake verified status, make sure the methods are _not_ marked
4877 // kAccSkipAccessChecks later.
4878 klass->SetVerificationAttempted();
jeffhaoe4f0b2a2012-08-30 11:18:57 -07004879 }
4880 }
jeffhao5cfd6fb2011-09-27 13:54:29 -07004881 } else {
David Sehr709b0702016-10-13 09:12:37 -07004882 VLOG(verifier) << "Verification failed on class " << klass->PrettyDescriptor()
Andreas Gampeec6e6c12015-11-05 20:39:56 -08004883 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4884 << " because: " << error_msg;
Ian Rogers00f7d0e2012-07-19 15:28:27 -07004885 self->AssertNoPendingException();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004886 ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
Vladimir Marko2c64a832018-01-04 11:31:56 +00004887 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
jeffhao5cfd6fb2011-09-27 13:54:29 -07004888 }
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004889 if (preverified || verifier_failure == verifier::FailureKind::kNoFailure) {
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004890 if (oat_file_class_status == ClassStatus::kVerifiedNeedsAccessChecks ||
4891 UNLIKELY(Runtime::Current()->IsVerificationSoftFail())) {
Igor Murashkindf707e42016-02-02 16:56:50 -08004892 // Never skip access checks if the verification soft fail is forced.
4893 // Mark the class as having a verification attempt to avoid re-running the verifier.
4894 klass->SetVerificationAttempted();
4895 } else {
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004896 // Class is verified so we don't need to do any access check on its methods.
4897 // Let the interpreter know it by setting the kAccSkipAccessChecks flag onto each
4898 // method.
4899 // Note: we're going here during compilation and at runtime. When we set the
4900 // kAccSkipAccessChecks flag when compiling image classes, the flag is recorded
4901 // in the image and is set when loading the image.
Andreas Gampecc1b5352016-12-01 16:58:38 -08004902 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
Igor Murashkindf707e42016-02-02 16:56:50 -08004903 }
Andreas Gampe48498592014-09-10 19:48:05 -07004904 }
Nicolas Geoffray486dda02017-09-11 14:15:52 +01004905 // Done verifying. Notify the compiler about the verification status, in case the class
4906 // was verified implicitly (eg super class of a compiled class).
4907 if (Runtime::Current()->IsAotCompiler()) {
4908 Runtime::Current()->GetCompilerCallbacks()->UpdateClassState(
4909 ClassReference(&klass->GetDexFile(), klass->GetDexClassDefIndex()), klass->GetStatus());
4910 }
Nicolas Geoffray08025182016-10-25 17:20:18 +01004911 return verifier_failure;
Andreas Gampe48498592014-09-10 19:48:05 -07004912}
4913
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004914verifier::FailureKind ClassLinker::PerformClassVerification(Thread* self,
4915 Handle<mirror::Class> klass,
4916 verifier::HardFailLogMode log_level,
4917 std::string* error_msg) {
4918 Runtime* const runtime = Runtime::Current();
Andreas Gampea43ba3d2019-03-13 15:49:20 -07004919 return verifier::ClassVerifier::VerifyClass(self,
4920 klass.Get(),
4921 runtime->GetCompilerCallbacks(),
4922 runtime->IsAotCompiler(),
4923 log_level,
4924 Runtime::Current()->GetTargetSdkVersion(),
4925 error_msg);
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004926}
4927
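// Decides whether the verification status recorded in the oat file can be trusted instead of
// running the verifier again. Roughly: statuses at or above kVerified are accepted outright,
// kVerifiedNeedsAccessChecks is accepted but the caller keeps access checks enabled,
// kResolved and kNotReady mean verification must run now, and erroneous statuses force a
// re-run so that a precise error message is regenerated.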
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07004928bool ClassLinker::VerifyClassUsingOatFile(const DexFile& dex_file,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004929 ObjPtr<mirror::Class> klass,
Vladimir Marko2c64a832018-01-04 11:31:56 +00004930 ClassStatus& oat_file_class_status) {
Anwar Ghuloum044d2832013-07-17 15:22:31 -07004931 // If we're compiling, we can only verify the class using the oat file if
4932 // we are not compiling the image or if the class we're verifying is not part of
Andreas Gampee9934582018-01-19 21:23:04 -08004933 // the compilation unit (app - dependencies). We will let the compiler callback
4934 // tell us about the latter.
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08004935 if (Runtime::Current()->IsAotCompiler()) {
Andreas Gampee9934582018-01-19 21:23:04 -08004936 CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
Anwar Ghuloum044d2832013-07-17 15:22:31 -07004937 // We are compiling an app (not the image).
Andreas Gampee9934582018-01-19 21:23:04 -08004938 if (!callbacks->CanUseOatStatusForVerification(klass.Ptr())) {
Anwar Ghuloum044d2832013-07-17 15:22:31 -07004939 return false;
4940 }
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004941 }
Anwar Ghuloum044d2832013-07-17 15:22:31 -07004942
Andreas Gampeb40d3612018-06-26 15:49:42 -07004943 const OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004944 // In case we run without an image there won't be a backing oat file.
Mathieu Chartier1b868492016-11-16 16:22:37 -08004945 if (oat_dex_file == nullptr || oat_dex_file->GetOatFile() == nullptr) {
Anwar Ghuloumad256bb2013-07-18 14:58:55 -07004946 return false;
4947 }
4948
Ian Rogers8b2c0b92013-09-19 02:56:49 -07004949 uint16_t class_def_index = klass->GetDexClassDefIndex();
Vladimir Markod3c5beb2014-04-11 16:32:51 +01004950 oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
Vladimir Marko2c64a832018-01-04 11:31:56 +00004951 if (oat_file_class_status >= ClassStatus::kVerified) {
Mathieu Chartiera079e3a2016-03-16 19:08:31 -07004952 return true;
4953 }
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004954 if (oat_file_class_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
4955 // We return that the class has already been verified, and the caller should
4956 // check the class status to ensure we run with access checks.
4957 return true;
4958 }
Mathieu Chartiera079e3a2016-03-16 19:08:31 -07004959 // If we only verified a subset of the classes at compile time, we can end up with classes that
4960 // were resolved by the verifier.
Vladimir Marko2c64a832018-01-04 11:31:56 +00004961 if (oat_file_class_status == ClassStatus::kResolved) {
Mathieu Chartiera079e3a2016-03-16 19:08:31 -07004962 return false;
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004963 }
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004964 // We never expect a .oat file to have kRetryVerificationAtRuntime statuses.
4965 CHECK_NE(oat_file_class_status, ClassStatus::kRetryVerificationAtRuntime)
4966 << klass->PrettyClass() << " " << dex_file.GetLocation();
4967
Vladimir Marko72ab6842017-01-20 19:32:50 +00004968 if (mirror::Class::IsErroneous(oat_file_class_status)) {
jeffhao1ac29442012-03-26 11:37:32 -07004969 // Compile time verification failed with a hard error. This is caused by invalid instructions
4970 // in the class. These errors are unrecoverable.
4971 return false;
4972 }
Vladimir Marko2c64a832018-01-04 11:31:56 +00004973 if (oat_file_class_status == ClassStatus::kNotReady) {
Ian Rogersc4762272012-02-01 15:55:55 -08004974 // Status is uninitialized if we couldn't determine the status at compile time, for example,
4975 // not loading the class.
4976 // TODO: when the verifier doesn't rely on Class-es failing to resolve/load the type hierarchy
4977 // isn't a problem and this case shouldn't occur
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004978 return false;
4979 }
Ian Rogers1ff3c982014-08-12 02:30:58 -07004980 std::string temp;
Elliott Hughes634eb2e2012-03-22 16:06:28 -07004981 LOG(FATAL) << "Unexpected class status: " << oat_file_class_status
David Sehr709b0702016-10-13 09:12:37 -07004982 << " " << dex_file.GetLocation() << " " << klass->PrettyClass() << " "
Ian Rogers1ff3c982014-08-12 02:30:58 -07004983 << klass->GetDescriptor(&temp);
Ian Rogerse0a02da2014-12-02 14:10:53 -08004984 UNREACHABLE();
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004985}
4986
Alex Light5a559862016-01-29 12:24:48 -08004987void ClassLinker::ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass) {
Alex Light51a64d52015-12-17 13:55:59 -08004988 for (ArtMethod& method : klass->GetMethods(image_pointer_size_)) {
Alex Light5a559862016-01-29 12:24:48 -08004989 ResolveMethodExceptionHandlerTypes(&method);
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004990 }
4991}
4992
Alex Light5a559862016-01-29 12:24:48 -08004993void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004994 // similar to DexVerifier::ScanTryCatchBlocks and dex2oat's ResolveExceptionsForMethod.
David Sehr0225f8e2018-01-31 08:52:24 +00004995 CodeItemDataAccessor accessor(method->DexInstructionData());
Mathieu Chartier808c7a52017-12-15 11:19:33 -08004996 if (!accessor.HasCodeItem()) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004997 return; // native or abstract method
4998 }
Mathieu Chartier808c7a52017-12-15 11:19:33 -08004999 if (accessor.TriesSize() == 0) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08005000 return; // nothing to process
5001 }
Mathieu Chartier808c7a52017-12-15 11:19:33 -08005002 const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
Brian Carlstrome7d856b2012-01-11 18:10:55 -08005003 uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
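  // handlers_ptr now points at the DEX encoded_catch_handler_list for this code item: the
  // ULEB128 count decoded above is followed by handlers_size encoded_catch_handler entries, and
  // iterator.EndDataPointer() below advances past the entry just walked so each one is visited.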
Brian Carlstrome7d856b2012-01-11 18:10:55 -08005004 for (uint32_t idx = 0; idx < handlers_size; idx++) {
5005 CatchHandlerIterator iterator(handlers_ptr);
5006 for (; iterator.HasNext(); iterator.Next()) {
5007 // Ensure exception types are resolved so that they don't need resolution to be delivered,
5008 // unresolved exception types will be ignored by exception delivery
Andreas Gampea5b09a62016-11-17 15:21:22 -08005009 if (iterator.GetHandlerTypeIndex().IsValid()) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005010 ObjPtr<mirror::Class> exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005011 if (exception_type == nullptr) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08005012 DCHECK(Thread::Current()->IsExceptionPending());
5013 Thread::Current()->ClearException();
5014 }
5015 }
5016 }
5017 handlers_ptr = iterator.EndDataPointer();
5018 }
5019}
5020
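// Builds the runtime class for a java.lang.reflect.Proxy. The arrays passed in (name,
// interfaces, methods, throws) are produced by libcore's Proxy implementation, which is assumed
// here to reach this code through its native proxy-generation entry point, for example from
//   Object p = Proxy.newProxyInstance(loader, new Class<?>[] { Runnable.class }, handler);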
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01005021ObjPtr<mirror::Class> ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& soa,
5022 jstring name,
5023 jobjectArray interfaces,
5024 jobject loader,
5025 jobjectArray methods,
5026 jobjectArray throws) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07005027 Thread* self = soa.Self();
Alex Lighte9f61032018-09-24 16:04:51 -07005028
5029 // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
5030 // code to be executed. We put it up here so we can avoid all the allocations associated with
5031 // creating the class. This can happen with (eg) jit-threads.
5032 if (!self->CanLoadClasses()) {
5033 // Make sure we don't try to load anything, potentially causing an infinite loop.
5034 ObjPtr<mirror::Throwable> pre_allocated =
5035 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
5036 self->SetException(pre_allocated);
5037 return nullptr;
5038 }
5039
Alex Light133987d2020-03-26 19:22:12 +00005040 StackHandleScope<12> hs(self);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005041 MutableHandle<mirror::Class> temp_klass(hs.NewHandle(
Vladimir Markob4eb1b12018-05-24 11:09:38 +01005042 AllocClass(self, GetClassRoot<mirror::Class>(this), sizeof(mirror::Class))));
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005043 if (temp_klass == nullptr) {
Ian Rogersa436fde2013-08-27 23:34:06 -07005044 CHECK(self->IsExceptionPending()); // OOME.
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005045 return nullptr;
Ian Rogersa436fde2013-08-27 23:34:06 -07005046 }
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005047 DCHECK(temp_klass->GetClass() != nullptr);
5048 temp_klass->SetObjectSize(sizeof(mirror::Proxy));
Igor Murashkindf707e42016-02-02 16:56:50 -08005049 // Set the class access flags incl. VerificationAttempted, so we do not try to set the flag on
5050 // the methods.
Vladimir Markob68bb7a2020-03-17 10:55:25 +00005051 temp_klass->SetAccessFlagsDuringLinking(
5052 kAccClassIsProxy | kAccPublic | kAccFinal | kAccVerificationAttempted);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005053 temp_klass->SetClassLoader(soa.Decode<mirror::ClassLoader>(loader));
5054 DCHECK_EQ(temp_klass->GetPrimitiveType(), Primitive::kPrimNot);
5055 temp_klass->SetName(soa.Decode<mirror::String>(name));
Vladimir Markob4eb1b12018-05-24 11:09:38 +01005056 temp_klass->SetDexCache(GetClassRoot<mirror::Proxy>(this)->GetDexCache());
Mathieu Chartier6beced42016-11-15 15:51:31 -08005057 // Object has an empty iftable, copy it for that reason.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01005058 temp_klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
Vladimir Marko2c64a832018-01-04 11:31:56 +00005059 mirror::Class::SetStatus(temp_klass, ClassStatus::kIdx, self);
Vladimir Marko3892e622019-03-15 15:22:18 +00005060 std::string storage;
5061 const char* descriptor = temp_klass->GetDescriptor(&storage);
5062 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005063
Mathieu Chartierd57d4542015-10-14 10:55:30 -07005064 // Needs to be before we insert the class so that the allocator field is set.
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005065 LinearAlloc* const allocator = GetOrCreateAllocatorForClassLoader(temp_klass->GetClassLoader());
Mathieu Chartierd57d4542015-10-14 10:55:30 -07005066
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005067 // Insert the class before loading the fields as the field roots
5068 // (ArtField::declaring_class_) are only visited from the class
5069 // table. There can't be any suspend points between inserting the
5070 // class and setting the field arrays below.
Vladimir Marko3892e622019-03-15 15:22:18 +00005071 ObjPtr<mirror::Class> existing = InsertClass(descriptor, temp_klass.Get(), hash);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005072 CHECK(existing == nullptr);
Ian Rogersc2b44472011-12-14 21:17:17 -08005073
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005074 // Instance fields are inherited, but we add a couple of static fields...
Mathieu Chartierc7853442015-03-27 14:35:38 -07005075 const size_t num_fields = 2;
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07005076 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005077 temp_klass->SetSFieldsPtr(sfields);
Mathieu Chartierc7853442015-03-27 14:35:38 -07005078
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005079 // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
5080 // our proxy, so Class.getInterfaces doesn't return the flattened set.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005081 ArtField& interfaces_sfield = sfields->At(0);
5082 interfaces_sfield.SetDexFieldIndex(0);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005083 interfaces_sfield.SetDeclaringClass(temp_klass.Get());
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005084 interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
Mathieu Chartierc7853442015-03-27 14:35:38 -07005085
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005086 // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005087 ArtField& throws_sfield = sfields->At(1);
5088 throws_sfield.SetDexFieldIndex(1);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005089 throws_sfield.SetDeclaringClass(temp_klass.Get());
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005090 throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
Jesse Wilson95caa792011-10-12 18:14:17 -04005091
Ian Rogers466bb252011-10-14 03:29:56 -07005092 // Proxies have 1 direct method, the constructor
Alex Lighte64300b2015-12-15 15:02:47 -08005093 const size_t num_direct_methods = 1;
Jesse Wilson95caa792011-10-12 18:14:17 -04005094
Alex Light133987d2020-03-26 19:22:12 +00005095 // The array we get passed contains all methods, including private and static
5096 // ones that aren't proxied. We need to filter those out since only interface
5097 // methods (non-private & virtual) are actually proxied.
5098 Handle<mirror::ObjectArray<mirror::Method>> h_methods =
5099 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>>(methods));
Vladimir Marko679730e2018-05-25 15:06:48 +01005100 DCHECK_EQ(h_methods->GetClass(), GetClassRoot<mirror::ObjectArray<mirror::Method>>())
David Sehr709b0702016-10-13 09:12:37 -07005101 << mirror::Class::PrettyClass(h_methods->GetClass());
Alex Light133987d2020-03-26 19:22:12 +00005102 // List of the actual virtual methods this class will have.
5103 std::vector<ArtMethod*> proxied_methods;
5104 std::vector<size_t> proxied_throws_idx;
5105 proxied_methods.reserve(h_methods->GetLength());
5106 proxied_throws_idx.reserve(h_methods->GetLength());
5107 // Filter out to only the non-private virtual methods.
5108 for (auto [mirror, idx] : ZipCount(h_methods.Iterate<mirror::Method>())) {
5109 ArtMethod* m = mirror->GetArtMethod();
5110 if (!m->IsPrivate() && !m->IsStatic()) {
5111 proxied_methods.push_back(m);
5112 proxied_throws_idx.push_back(idx);
5113 }
5114 }
5115 const size_t num_virtual_methods = proxied_methods.size();
Alex Lightbc115092020-03-27 11:25:16 -07005116 // We also need to filter out the 'throws'. The 'throws' are a Class[][] that
5117 // contains an array of all the classes each function is declared to throw.
5118 // This is used to wrap unexpected exceptions in an
5119 // UndeclaredThrowableException. This array is in the same order as
5120 // the methods array and like the methods array must be filtered to remove any
5121 // non-proxied methods.
Alex Light133987d2020-03-26 19:22:12 +00005122 const bool has_filtered_methods =
5123 static_cast<int32_t>(num_virtual_methods) != h_methods->GetLength();
5124 MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> original_proxied_throws(
5125 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws)));
5126 MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> proxied_throws(
5127 hs.NewHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(
5128 (has_filtered_methods)
5129 ? mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>::Alloc(
5130 self, original_proxied_throws->GetClass(), num_virtual_methods)
5131 : original_proxied_throws.Get()));
Alex Lightbc115092020-03-27 11:25:16 -07005132 if (proxied_throws.IsNull() && !original_proxied_throws.IsNull()) {
5133 self->AssertPendingOOMException();
5134 return nullptr;
5135 }
Alex Light133987d2020-03-26 19:22:12 +00005136 if (has_filtered_methods) {
5137 for (auto [orig_idx, new_idx] : ZipCount(MakeIterationRange(proxied_throws_idx))) {
5138 DCHECK_LE(new_idx, orig_idx);
5139 proxied_throws->Set(new_idx, original_proxied_throws->Get(orig_idx));
5140 }
5141 }
Alex Lighte64300b2015-12-15 15:02:47 -08005142
5143 // Create the methods array.
5144 LengthPrefixedArray<ArtMethod>* proxy_class_methods = AllocArtMethodArray(
5145 self, allocator, num_direct_methods + num_virtual_methods);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005146 // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
5147 // want to throw OOM in the future.
Alex Lighte64300b2015-12-15 15:02:47 -08005148 if (UNLIKELY(proxy_class_methods == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005149 self->AssertPendingOOMException();
5150 return nullptr;
Ian Rogersa436fde2013-08-27 23:34:06 -07005151 }
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005152 temp_klass->SetMethodsPtr(proxy_class_methods, num_direct_methods, num_virtual_methods);
Alex Lighte64300b2015-12-15 15:02:47 -08005153
5154 // Create the single direct method.
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005155 CreateProxyConstructor(temp_klass, temp_klass->GetDirectMethodUnchecked(0, image_pointer_size_));
Alex Lighte64300b2015-12-15 15:02:47 -08005156
5157 // Create virtual method using specified prototypes.
5158 // TODO These should really use the iterators.
Jesse Wilson95caa792011-10-12 18:14:17 -04005159 for (size_t i = 0; i < num_virtual_methods; ++i) {
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005160 auto* virtual_method = temp_klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
Alex Light133987d2020-03-26 19:22:12 +00005161 auto* prototype = proxied_methods[i];
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005162 CreateProxyMethod(temp_klass, prototype, virtual_method);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005163 DCHECK(virtual_method->GetDeclaringClass() != nullptr);
5164 DCHECK(prototype->GetDeclaringClass() != nullptr);
Jesse Wilson95caa792011-10-12 18:14:17 -04005165 }
Ian Rogersc2b44472011-12-14 21:17:17 -08005166
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005167 // The super class is java.lang.reflect.Proxy
Vladimir Markob4eb1b12018-05-24 11:09:38 +01005168 temp_klass->SetSuperClass(GetClassRoot<mirror::Proxy>(this));
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005169 // Now effectively in the loaded state.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005170 mirror::Class::SetStatus(temp_klass, ClassStatus::kLoaded, self);
Ian Rogers62d6c772013-02-27 08:32:07 -08005171 self->AssertNoPendingException();
Ian Rogersc2b44472011-12-14 21:17:17 -08005172
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005173 // At this point the class is loaded. Publish a ClassLoad event.
5174 // Note: this may be a temporary class. It is a listener's responsibility to handle this.
5175 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(temp_klass);
5176
5177 MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
Ian Rogersc8982582012-09-07 16:53:25 -07005178 {
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005179 // Must hold lock on object when resolved.
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005180 ObjectLock<mirror::Class> resolution_lock(self, temp_klass);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005181 // Link the fields and virtual methods, creating vtable and iftables.
5182 // The new class will replace the old one in the class table.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005183 Handle<mirror::ObjectArray<mirror::Class>> h_interfaces(
Mathieu Chartier0795f232016-09-27 18:43:30 -07005184 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces)));
Vladimir Marko3892e622019-03-15 15:22:18 +00005185 if (!LinkClass(self, descriptor, temp_klass, h_interfaces, &klass)) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00005186 mirror::Class::SetStatus(temp_klass, ClassStatus::kErrorUnresolved, self);
Mathieu Chartierc528dba2013-11-26 12:00:11 -08005187 return nullptr;
Ian Rogers7dfb28c2013-08-22 08:18:36 -07005188 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005189 }
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005190 CHECK(temp_klass->IsRetired());
5191 CHECK_NE(temp_klass.Get(), klass.Get());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005192
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005193 CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
Mathieu Chartier0795f232016-09-27 18:43:30 -07005194 interfaces_sfield.SetObject<false>(
5195 klass.Get(),
Mathieu Chartierf8ac97f2016-10-05 15:56:52 -07005196 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005197 CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
5198 throws_sfield.SetObject<false>(
Mathieu Chartier0795f232016-09-27 18:43:30 -07005199 klass.Get(),
Alex Light133987d2020-03-26 19:22:12 +00005200 proxied_throws.Get());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005201
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005202 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);
5203
Vladimir Marko305c38b2018-02-14 11:50:07 +00005204 // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
5205 // See also ClassLinker::EnsureInitialized().
5206 if (kBitstringSubtypeCheckEnabled) {
5207 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
5208 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
5209 // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
5210 }
5211
Vladimir Markobf121912019-06-04 13:49:05 +01005212 VisiblyInitializedCallback* callback = nullptr;
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005213 {
5214 // Lock on klass is released. Lock new class object.
5215 ObjectLock<mirror::Class> initialization_lock(self, klass);
Andreas Gampe5b20b352018-10-11 19:03:20 -07005216 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
Vladimir Markobf121912019-06-04 13:49:05 +01005217 // Conservatively go through the ClassStatus::kInitialized state.
5218 callback = MarkClassInitialized(self, klass);
5219 }
5220 if (callback != nullptr) {
5221 callback->MakeVisible(self);
Ian Rogersc8982582012-09-07 16:53:25 -07005222 }
Ian Rogersc2b44472011-12-14 21:17:17 -08005223
5224 // sanity checks
Elliott Hughes67d92002012-03-26 15:08:51 -07005225 if (kIsDebugBuild) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005226 CHECK(klass->GetIFieldsPtr() == nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005227 CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));
5228
Ian Rogersc2b44472011-12-14 21:17:17 -08005229 for (size_t i = 0; i < num_virtual_methods; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005230 auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
Alex Light133987d2020-03-26 19:22:12 +00005231 CheckProxyMethod(virtual_method, proxied_methods[i]);
Ian Rogersc2b44472011-12-14 21:17:17 -08005232 }
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005233
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005234 StackHandleScope<1> hs2(self);
Mathieu Chartier0795f232016-09-27 18:43:30 -07005235 Handle<mirror::String> decoded_name = hs2.NewHandle(soa.Decode<mirror::String>(name));
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005236 std::string interfaces_field_name(StringPrintf("java.lang.Class[] %s.interfaces",
Mathieu Chartier590fee92013-09-13 13:46:47 -07005237 decoded_name->ToModifiedUtf8().c_str()));
David Sehr709b0702016-10-13 09:12:37 -07005238 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(0)), interfaces_field_name);
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005239
5240 std::string throws_field_name(StringPrintf("java.lang.Class[][] %s.throws",
Mathieu Chartier590fee92013-09-13 13:46:47 -07005241 decoded_name->ToModifiedUtf8().c_str()));
David Sehr709b0702016-10-13 09:12:37 -07005242 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(1)), throws_field_name);
Ian Rogersc2b44472011-12-14 21:17:17 -08005243
Narayan Kamath6b2dc312017-03-14 13:26:12 +00005244 CHECK_EQ(klass.Get()->GetProxyInterfaces(),
Mathieu Chartierf8ac97f2016-10-05 15:56:52 -07005245 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
Narayan Kamath6b2dc312017-03-14 13:26:12 +00005246 CHECK_EQ(klass.Get()->GetProxyThrows(),
Alex Light133987d2020-03-26 19:22:12 +00005247 proxied_throws.Get());
Ian Rogersc2b44472011-12-14 21:17:17 -08005248 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005249 return klass.Get();
Jesse Wilson95caa792011-10-12 18:14:17 -04005250}
5251
Mathieu Chartiere401d142015-04-22 13:56:20 -07005252void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod* out) {
5253 // Create constructor for Proxy that must initialize the method.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01005254 ObjPtr<mirror::Class> proxy_class = GetClassRoot<mirror::Proxy>(this);
5255 CHECK_EQ(proxy_class->NumDirectMethods(), 21u);
Przemyslaw Szczepaniakf11cd292016-08-17 17:46:38 +01005256
Igor Murashkin9d3d7522017-02-27 10:39:49 -08005257 // Find the <init>(InvocationHandler)V method. The exact method offset varies depending
5258 // on which front-end compiler was used to build the libcore DEX files.
Alex Light6cae5ea2018-06-07 17:07:02 -07005259 ArtMethod* proxy_constructor =
5260 jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init);
Igor Murashkin9d3d7522017-02-27 10:39:49 -08005261 DCHECK(proxy_constructor != nullptr)
5262 << "Could not find <init> method in java.lang.reflect.Proxy";
5263
Jeff Haodb8a6642014-08-14 17:18:52 -07005264 // Clone the existing constructor of Proxy (our constructor would just invoke it so steal its
5265 // code_ too)
Mathieu Chartiere401d142015-04-22 13:56:20 -07005266 DCHECK(out != nullptr);
5267 out->CopyFrom(proxy_constructor, image_pointer_size_);
Vladimir Markoba118822017-06-12 15:41:56 +01005268 // Make this constructor public and fix the class to be our Proxy version.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005269 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
Vladimir Markoba118822017-06-12 15:41:56 +01005270 // Note that the compiler calls a ResolveMethod() overload that does not handle a Proxy referrer.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005271 out->SetAccessFlags((out->GetAccessFlags() & ~kAccProtected) |
5272 kAccPublic |
5273 kAccCompileDontBother);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005274 out->SetDeclaringClass(klass.Get());
Vladimir Markod1ee20f2017-08-17 09:21:16 +00005275
5276 // Set the original constructor method.
5277 out->SetDataPtrSize(proxy_constructor, image_pointer_size_);
Ian Rogersc2b44472011-12-14 21:17:17 -08005278}
5279
Mathieu Chartiere401d142015-04-22 13:56:20 -07005280void ClassLinker::CheckProxyConstructor(ArtMethod* constructor) const {
Ian Rogers466bb252011-10-14 03:29:56 -07005281 CHECK(constructor->IsConstructor());
Mathieu Chartiere401d142015-04-22 13:56:20 -07005282 auto* np = constructor->GetInterfaceMethodIfProxy(image_pointer_size_);
5283 CHECK_STREQ(np->GetName(), "<init>");
5284 CHECK_STREQ(np->GetSignature().ToString().c_str(), "(Ljava/lang/reflect/InvocationHandler;)V");
Ian Rogers466bb252011-10-14 03:29:56 -07005285 DCHECK(constructor->IsPublic());
Jesse Wilson95caa792011-10-12 18:14:17 -04005286}
5287
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005288void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prototype,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005289 ArtMethod* out) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08005290 // We steal everything from the prototype (such as DexCache, invoke stub, etc.) then specialize
Ian Rogers466bb252011-10-14 03:29:56 -07005291 // as necessary
Mathieu Chartiere401d142015-04-22 13:56:20 -07005292 DCHECK(out != nullptr);
5293 out->CopyFrom(prototype, image_pointer_size_);
Ian Rogers466bb252011-10-14 03:29:56 -07005294
Alex Lighte9dd04f2016-03-16 16:09:45 -07005295 // Set class to be the concrete proxy class.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005296 out->SetDeclaringClass(klass.Get());
Alex Lighte9dd04f2016-03-16 16:09:45 -07005297 // Clear the abstract, default and conflict flags to ensure that defaults aren't picked in
5298 // preference to the invocation handler.
5299 const uint32_t kRemoveFlags = kAccAbstract | kAccDefault | kAccDefaultConflict;
5300 // Make the method final.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005301 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5302 const uint32_t kAddFlags = kAccFinal | kAccCompileDontBother;
Alex Lighte9dd04f2016-03-16 16:09:45 -07005303 out->SetAccessFlags((out->GetAccessFlags() & ~kRemoveFlags) | kAddFlags);
5304
5305 // Clear the dex_code_item_offset_. It needs to be 0 since proxy methods have no CodeItems but the
5306 // method they copy might (if it's a default method).
5307 out->SetCodeItemOffset(0);
Jesse Wilson95caa792011-10-12 18:14:17 -04005308
Vladimir Markod1ee20f2017-08-17 09:21:16 +00005309 // Set the original interface method.
5310 out->SetDataPtrSize(prototype, image_pointer_size_);
5311
Ian Rogers466bb252011-10-14 03:29:56 -07005312 // At runtime the method looks like a reference and argument saving method, clone the code
5313 // related parameters from this method.
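  // All proxy methods share GetQuickProxyInvokeHandler() as their quick entry point; when
  // invoked, that stub gathers the arguments and dispatches to the InvocationHandler attached
  // to the proxy instance.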
Mathieu Chartiere401d142015-04-22 13:56:20 -07005314 out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler());
Ian Rogersc2b44472011-12-14 21:17:17 -08005315}
Jesse Wilson95caa792011-10-12 18:14:17 -04005316
Mathieu Chartiere401d142015-04-22 13:56:20 -07005317void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const {
Ian Rogers466bb252011-10-14 03:29:56 -07005318 // Basic sanity
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08005319 CHECK(!prototype->IsFinal());
5320 CHECK(method->IsFinal());
Alex Light9139e002015-10-09 15:59:48 -07005321 CHECK(method->IsInvokable());
Ian Rogers19846512012-02-24 11:42:47 -08005322
5323 // The proxy method doesn't have its own dex cache or dex file and so it steals those of its
5324 // interface prototype. The exceptions to this are Constructors and the Class of the Proxy itself.
Ian Rogers19846512012-02-24 11:42:47 -08005325 CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
Vladimir Marko5c3e9d12017-08-30 16:43:54 +01005326 CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
Jesse Wilson95caa792011-10-12 18:14:17 -04005327}
5328
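// Conservative pre-check used by InitializeClass. With both flags set, initialization may always
// proceed. Without can_init_statics the class must have neither a <clinit> nor encoded static
// field values. Without can_init_parents the superclass and any interfaces with default methods
// must already be initialized.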
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005329bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass, bool can_init_statics,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005330 bool can_init_parents) {
Brian Carlstrom610e49f2013-11-04 17:07:22 -08005331 if (can_init_statics && can_init_parents) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005332 return true;
5333 }
5334 if (!can_init_statics) {
5335 // Check if there's a class initializer.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005336 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005337 if (clinit != nullptr) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005338 return false;
5339 }
5340 // Check if there are encoded static values needing initialization.
5341 if (klass->NumStaticFields() != 0) {
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005342 const dex::ClassDef* dex_class_def = klass->GetClassDef();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005343 DCHECK(dex_class_def != nullptr);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005344 if (dex_class_def->static_values_off_ != 0) {
5345 return false;
5346 }
5347 }
Vladimir Marko889b72d2019-11-12 11:01:13 +00005348 }
5349 // If we are a class we need to initialize all interfaces with default methods when we are
5350 // initialized. Check all of them.
5351 if (!klass->IsInterface()) {
5352 size_t num_interfaces = klass->GetIfTableCount();
5353 for (size_t i = 0; i < num_interfaces; i++) {
5354 ObjPtr<mirror::Class> iface = klass->GetIfTable()->GetInterface(i);
5355 if (iface->HasDefaultMethods() && !iface->IsInitialized()) {
5356 if (!can_init_parents || !CanWeInitializeClass(iface, can_init_statics, can_init_parents)) {
Alex Lighteb7c1442015-08-31 13:17:42 -07005357 return false;
5358 }
5359 }
5360 }
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005361 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07005362 if (klass->IsInterface() || !klass->HasSuperClass()) {
5363 return true;
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005364 }
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005365 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Vladimir Marko889b72d2019-11-12 11:01:13 +00005366 if (super_class->IsInitialized()) {
5367 return true;
Mathieu Chartiere401d142015-04-22 13:56:20 -07005368 }
Vladimir Marko889b72d2019-11-12 11:01:13 +00005369 return can_init_parents && CanWeInitializeClass(super_class, can_init_statics, can_init_parents);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005370}
5371
Mathieu Chartier23369542020-03-04 08:24:11 -08005372bool ClassLinker::InitializeClass(Thread* self,
5373 Handle<mirror::Class> klass,
5374 bool can_init_statics,
5375 bool can_init_parents) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005376 // see JLS 3rd edition, 12.4.2 "Detailed Initialization Procedure" for the locking protocol
5377
5378 // Are we already initialized and therefore done?
5379 // Note: we differ from the JLS here as we don't do this under the lock, this is benign as
5380 // an initialized class will never change its state.
5381 if (klass->IsInitialized()) {
5382 return true;
5383 }
5384
5385 // Fast fail if initialization requires a full runtime. Not part of the JLS.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005386 if (!CanWeInitializeClass(klass.Get(), can_init_statics, can_init_parents)) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005387 return false;
5388 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005389
Ian Rogers7b078e82014-09-10 14:44:24 -07005390 self->AllowThreadSuspension();
Mathieu Chartier23369542020-03-04 08:24:11 -08005391 Runtime* const runtime = Runtime::Current();
5392 const bool stats_enabled = runtime->HasStatsEnabled();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005393 uint64_t t0;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005394 {
Mathieu Chartierdb2633c2014-05-16 09:59:29 -07005395 ObjectLock<mirror::Class> lock(self, klass);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005396
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005397 // Re-check under the lock in case another thread initialized ahead of us.
5398 if (klass->IsInitialized()) {
Brian Carlstromd1422f82011-09-28 11:37:09 -07005399 return true;
5400 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005401
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005402 // Was the class already found to be erroneous? Done under the lock to match the JLS.
Brian Carlstromd1422f82011-09-28 11:37:09 -07005403 if (klass->IsErroneous()) {
Andreas Gampe7b3063b2019-01-07 14:12:52 -08005404 ThrowEarlierClassFailure(klass.Get(), true, /* log= */ true);
Brian Carlstromb23eab12014-10-08 17:55:21 -07005405 VlogClassInitializationFailure(klass);
Brian Carlstromd1422f82011-09-28 11:37:09 -07005406 return false;
5407 }
5408
Vladimir Marko72ab6842017-01-20 19:32:50 +00005409 CHECK(klass->IsResolved() && !klass->IsErroneousResolved())
5410 << klass->PrettyClass() << ": state=" << klass->GetStatus();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005411
5412 if (!klass->IsVerified()) {
Ian Rogers7b078e82014-09-10 14:44:24 -07005413 VerifyClass(self, klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005414 if (!klass->IsVerified()) {
5415 // We failed to verify; expect either the klass to be erroneous or verification to have
5416 // failed at compile time.
5417 if (klass->IsErroneous()) {
Andreas Gampefc49fa02016-04-21 12:21:55 -07005418 // The class is erroneous. This may be a verifier error, or another thread attempted
5419 // verification and/or initialization and failed. We can distinguish those cases by
5420 // whether an exception is already pending.
5421 if (self->IsExceptionPending()) {
5422 // Check that it's a VerifyError.
5423 DCHECK_EQ("java.lang.Class<java.lang.VerifyError>",
David Sehr709b0702016-10-13 09:12:37 -07005424 mirror::Class::PrettyClass(self->GetException()->GetClass()));
Andreas Gampefc49fa02016-04-21 12:21:55 -07005425 } else {
5426 // Check that another thread attempted initialization.
5427 DCHECK_NE(0, klass->GetClinitThreadId());
5428 DCHECK_NE(self->GetTid(), klass->GetClinitThreadId());
5429 // Need to rethrow the previous failure now.
5430 ThrowEarlierClassFailure(klass.Get(), true);
5431 }
Brian Carlstromb23eab12014-10-08 17:55:21 -07005432 VlogClassInitializationFailure(klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005433 } else {
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08005434 CHECK(Runtime::Current()->IsAotCompiler());
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01005435 CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerifiedNeedsAccessChecks());
Vladimir Markod79b37b2018-11-02 13:06:22 +00005436 self->AssertNoPendingException();
5437 self->SetException(Runtime::Current()->GetPreAllocatedNoClassDefFoundError());
jeffhaoa9b3bf42012-06-06 17:18:39 -07005438 }
Vladimir Markod79b37b2018-11-02 13:06:22 +00005439 self->AssertPendingException();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005440 return false;
Mathieu Chartier524507a2014-08-27 15:28:28 -07005441 } else {
5442 self->AssertNoPendingException();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005443 }
Andreas Gampefc49fa02016-04-21 12:21:55 -07005444
5445 // A separate thread could have moved us all the way to initialized. A "simple" example
5446 // involves a subclass of the current class being initialized at the same time (which
5447 // will implicitly initialize the superclass, if scheduled that way). b/28254258
Vladimir Marko72ab6842017-01-20 19:32:50 +00005448 DCHECK(!klass->IsErroneous()) << klass->GetStatus();
Andreas Gampefc49fa02016-04-21 12:21:55 -07005449 if (klass->IsInitialized()) {
5450 return true;
5451 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005452 }
5453
Vladimir Marko2c64a832018-01-04 11:31:56 +00005454 // If the class is ClassStatus::kInitializing, either this thread is
Brian Carlstromd1422f82011-09-28 11:37:09 -07005455 // initializing higher up the stack or another thread has beaten us
5456 // to initializing and we need to wait. Either way, this
5457 // invocation of InitializeClass will not be responsible for
5458 // running <clinit> and will return.
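// Illustrative sketch (assumed example, not code from this runtime): a static initializer
// that allocates an instance of its own class, e.g.
//   class A { static A instance = new A(); }
// re-enters InitializeClass for A on the thread that is already running A.<clinit>; that is
// the GetClinitThreadId() == self->GetTid() case handled below.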
Vladimir Marko2c64a832018-01-04 11:31:56 +00005459 if (klass->GetStatus() == ClassStatus::kInitializing) {
Mathieu Chartier524507a2014-08-27 15:28:28 -07005460 // Could have got an exception during verification.
5461 if (self->IsExceptionPending()) {
Brian Carlstromb23eab12014-10-08 17:55:21 -07005462 VlogClassInitializationFailure(klass);
Mathieu Chartier524507a2014-08-27 15:28:28 -07005463 return false;
5464 }
Elliott Hughes005ab2e2011-09-11 17:15:31 -07005465 // We caught somebody else in the act; was it us?
Elliott Hughesdcc24742011-09-07 14:02:44 -07005466 if (klass->GetClinitThreadId() == self->GetTid()) {
Brian Carlstromd1422f82011-09-28 11:37:09 -07005467 // Yes. That's fine. Return so we can continue initializing.
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005468 return true;
5469 }
Brian Carlstromd1422f82011-09-28 11:37:09 -07005470 // No. That's fine. Wait for another thread to finish initializing.
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005471 return WaitForInitializeClass(klass, self, lock);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005472 }
5473
Jeff Haoe2e40342017-07-19 10:45:18 -07005474 // Try to get the oat class's status for this class if the oat file is present. The compiler
5475 // tries to validate superclass descriptors, and writes the result into the oat file.
5476 // Runtime correctness is guaranteed by classpath checks done on loading. If the classpath
5477 // is different at runtime than it was at compile time, the oat file is rejected. So if the
5478 // oat file is present, the classpaths must match, and the runtime check can be skipped.
Jeff Hao0cb17282017-07-12 14:51:49 -07005479 bool has_oat_class = false;
Jeff Haoe2e40342017-07-19 10:45:18 -07005480 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
5481 ? OatFile::FindOatClass(klass->GetDexFile(), klass->GetDexClassDefIndex(), &has_oat_class)
5482 : OatFile::OatClass::Invalid();
Vladimir Marko2c64a832018-01-04 11:31:56 +00005483 if (oat_class.GetStatus() < ClassStatus::kSuperclassValidated &&
Jeff Hao0cb17282017-07-12 14:51:49 -07005484 !ValidateSuperClassDescriptors(klass)) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00005485 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005486 return false;
5487 }
Ian Rogers7b078e82014-09-10 14:44:24 -07005488 self->AllowThreadSuspension();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005489
Vladimir Marko2c64a832018-01-04 11:31:56 +00005490 CHECK_EQ(klass->GetStatus(), ClassStatus::kVerified) << klass->PrettyClass()
Andreas Gampe9510ccd2016-04-20 09:55:25 -07005491 << " self.tid=" << self->GetTid() << " clinit.tid=" << klass->GetClinitThreadId();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005492
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005493 // From here on out, other threads may observe that we're initializing, so changes of state
5494 // require a notification.
Elliott Hughesdcc24742011-09-07 14:02:44 -07005495 klass->SetClinitThreadId(self->GetTid());
Vladimir Marko2c64a832018-01-04 11:31:56 +00005496 mirror::Class::SetStatus(klass, ClassStatus::kInitializing, self);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005497
Mathieu Chartier23369542020-03-04 08:24:11 -08005498 t0 = stats_enabled ? NanoTime() : 0u;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005499 }
5500
Andreas Gampeaf864702019-07-23 14:05:35 -07005501 uint64_t t_sub = 0;
5502
Brian Carlstrom6d3f72c2013-08-21 18:06:34 -07005503 // Initialize super classes; per the JLS this must be done while we are initializing.
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005504 if (!klass->IsInterface() && klass->HasSuperClass()) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005505 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005506 if (!super_class->IsInitialized()) {
5507 CHECK(!super_class->IsInterface());
5508 CHECK(can_init_parents);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005509 StackHandleScope<1> hs(self);
5510 Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
Mathieu Chartier23369542020-03-04 08:24:11 -08005511 uint64_t super_t0 = stats_enabled ? NanoTime() : 0u;
Ian Rogers7b078e82014-09-10 14:44:24 -07005512 bool super_initialized = InitializeClass(self, handle_scope_super, can_init_statics, true);
Mathieu Chartier23369542020-03-04 08:24:11 -08005513 uint64_t super_t1 = stats_enabled ? NanoTime() : 0u;
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005514 if (!super_initialized) {
5515 // The super class was verified ahead of entering initializing, so we should only be here
5516 // if the super class became erroneous due to initialization.
Chang Xingadbb91c2017-07-17 11:23:55 -07005517 // In the AOT compiler case, the super class might also be initializing, but we don't
5518 // want to process circular dependencies in pre-compile.
5519 CHECK(self->IsExceptionPending())
Brian Carlstromf3632832014-05-20 15:36:53 -07005520 << "Super class initialization failed for "
David Sehr709b0702016-10-13 09:12:37 -07005521 << handle_scope_super->PrettyDescriptor()
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005522 << " that has unexpected status " << handle_scope_super->GetStatus()
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005523 << "\nPending exception:\n"
Nicolas Geoffray14691c52015-03-05 10:40:17 +00005524 << (self->GetException() != nullptr ? self->GetException()->Dump() : "");
Mathieu Chartierdb2633c2014-05-16 09:59:29 -07005525 ObjectLock<mirror::Class> lock(self, klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005526 // Initialization failed because the super-class is erroneous.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005527 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005528 return false;
5529 }
Andreas Gampeaf864702019-07-23 14:05:35 -07005530 t_sub = super_t1 - super_t0;
Ian Rogers1bddec32012-02-04 12:27:34 -08005531 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005532 }
5533
Alex Lighteb7c1442015-08-31 13:17:42 -07005534 if (!klass->IsInterface()) {
5535 // Initialize interfaces with default methods for the JLS.
5536 size_t num_direct_interfaces = klass->NumDirectInterfaces();
Alex Light56a40f52015-10-14 11:07:41 -07005537 // Only set up the (expensive) handle scope if we actually need to.
5538 if (UNLIKELY(num_direct_interfaces > 0)) {
Alex Lighteb7c1442015-08-31 13:17:42 -07005539 StackHandleScope<1> hs_iface(self);
Alex Light56a40f52015-10-14 11:07:41 -07005540 MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
5541 for (size_t i = 0; i < num_direct_interfaces; i++) {
Vladimir Marko19a4d372016-12-08 14:41:46 +00005542 handle_scope_iface.Assign(mirror::Class::GetDirectInterface(self, klass.Get(), i));
Vladimir Marko8d6768d2017-03-14 10:13:21 +00005543 CHECK(handle_scope_iface != nullptr) << klass->PrettyDescriptor() << " iface #" << i;
Alex Light56a40f52015-10-14 11:07:41 -07005544 CHECK(handle_scope_iface->IsInterface());
5545 if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
5546 // We have already done this for this interface. Skip it.
5547 continue;
5548 }
5549 // We cannot just call InitializeClass directly because we need to ensure that ALL
5550 // interfaces with default methods are initialized. Non-default interface initialization
5551 // will not affect other non-default super-interfaces.
Mathieu Chartier23369542020-03-04 08:24:11 -08005552 // This timing is not very precise; it misses all the walking.
5553 uint64_t inf_t0 = stats_enabled ? NanoTime() : 0u;
Alex Light56a40f52015-10-14 11:07:41 -07005554 bool iface_initialized = InitializeDefaultInterfaceRecursive(self,
5555 handle_scope_iface,
5556 can_init_statics,
5557 can_init_parents);
Mathieu Chartier23369542020-03-04 08:24:11 -08005558 uint64_t inf_t1 = stats_enabled ? NanoTime() : 0u;
Alex Light56a40f52015-10-14 11:07:41 -07005559 if (!iface_initialized) {
5560 ObjectLock<mirror::Class> lock(self, klass);
5561 // Initialization failed because one of our interfaces with default methods is erroneous.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005562 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Alex Light56a40f52015-10-14 11:07:41 -07005563 return false;
5564 }
Andreas Gampeaf864702019-07-23 14:05:35 -07005565 t_sub += inf_t1 - inf_t0;
Alex Lighteb7c1442015-08-31 13:17:42 -07005566 }
5567 }
5568 }
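// Illustrative sketch (assumed example, standard JLS 12.4.2 behavior): given
//   interface Marker { int X = expensiveInit(); }        // static state, no default methods
//   interface WithDefault { default void m() { } }
//   class C extends Base implements Marker, WithDefault { }
// initializing C initializes Base (above), then WithDefault because it declares a default
// method, while Marker is skipped and is only initialized when one of its own fields is
// actually accessed.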
5569
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005570 const size_t num_static_fields = klass->NumStaticFields();
5571 if (num_static_fields > 0) {
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005572 const dex::ClassDef* dex_class_def = klass->GetClassDef();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005573 CHECK(dex_class_def != nullptr);
Hiroshi Yamauchi67ef46a2014-08-21 15:59:43 -07005574 StackHandleScope<3> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005575 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
Mathieu Chartierf8322842014-05-16 10:59:25 -07005576 Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005577
5578 // Eagerly fill in static fields so that the we don't have to do as many expensive
5579 // Class::FindStaticField in ResolveField.
5580 for (size_t i = 0; i < num_static_fields; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07005581 ArtField* field = klass->GetStaticField(i);
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005582 const uint32_t field_idx = field->GetDexFieldIndex();
Mathieu Chartierc7853442015-03-27 14:35:38 -07005583 ArtField* resolved_field = dex_cache->GetResolvedField(field_idx, image_pointer_size_);
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005584 if (resolved_field == nullptr) {
David Brazdil1ab0fa82018-05-04 11:28:03 +01005585 // Populating cache of a dex file which defines `klass` should always be allowed.
David Brazdilf50ac102018-10-17 18:00:06 +01005586 DCHECK(!hiddenapi::ShouldDenyAccessToMember(
5587 field,
5588 hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
5589 hiddenapi::AccessMethod::kNone));
Mathieu Chartierc7853442015-03-27 14:35:38 -07005590 dex_cache->SetResolvedField(field_idx, field, image_pointer_size_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07005591 } else {
5592 DCHECK_EQ(field, resolved_field);
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005593 }
5594 }
5595
Vladimir Markoe11dd502017-12-08 14:09:45 +00005596 annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
5597 class_loader,
David Sehr9323e6e2016-09-13 08:58:35 -07005598 this,
5599 *dex_class_def);
Vladimir Markoe11dd502017-12-08 14:09:45 +00005600 const DexFile& dex_file = *dex_cache->GetDexFile();
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005601
Hiroshi Yamauchi88500112014-08-22 12:12:56 -07005602 if (value_it.HasNext()) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005603 ClassAccessor accessor(dex_file, *dex_class_def);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005604 CHECK(can_init_statics);
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005605 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
5606 if (!value_it.HasNext()) {
5607 break;
5608 }
5609 ArtField* art_field = ResolveField(field.GetIndex(),
5610 dex_cache,
5611 class_loader,
Andreas Gampe98ea9d92018-10-19 14:06:15 -07005612 /* is_static= */ true);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +01005613 if (Runtime::Current()->IsActiveTransaction()) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005614 value_it.ReadValueToField<true>(art_field);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +01005615 } else {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005616 value_it.ReadValueToField<false>(art_field);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +01005617 }
Mathieu Chartierda595be2016-08-10 13:57:39 -07005618 if (self->IsExceptionPending()) {
5619 break;
5620 }
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005621 value_it.Next();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005622 }
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005623 DCHECK(self->IsExceptionPending() || !value_it.HasNext());
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005624 }
5625 }
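// Illustrative sketch (assumed example): for
//   class C { static final int X = 42; static int y; }
// the encoded-value loop above writes 42 into X directly from the dex file's encoded static
// values, while y stays zero-initialized and is only assigned by <clinit>, if the class has
// one.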
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005626
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005627
Mathieu Chartierda595be2016-08-10 13:57:39 -07005628 if (!self->IsExceptionPending()) {
5629 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5630 if (clinit != nullptr) {
5631 CHECK(can_init_statics);
5632 JValue result;
5633 clinit->Invoke(self, nullptr, 0, &result, "V");
5634 }
5635 }
Ian Rogers7b078e82014-09-10 14:44:24 -07005636 self->AllowThreadSuspension();
Mathieu Chartier23369542020-03-04 08:24:11 -08005637 uint64_t t1 = stats_enabled ? NanoTime() : 0u;
Elliott Hughes83df2ac2011-10-11 16:37:54 -07005638
Vladimir Markobf121912019-06-04 13:49:05 +01005639 VisiblyInitializedCallback* callback = nullptr;
Ian Rogersbdfb1a52012-01-12 14:05:22 -08005640 bool success = true;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005641 {
Mathieu Chartierdb2633c2014-05-16 09:59:29 -07005642 ObjectLock<mirror::Class> lock(self, klass);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005643
5644 if (self->IsExceptionPending()) {
Brian Carlstromb23eab12014-10-08 17:55:21 -07005645 WrapExceptionInInitializer(klass);
Vladimir Marko2c64a832018-01-04 11:31:56 +00005646 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Ian Rogersbdfb1a52012-01-12 14:05:22 -08005647 success = false;
Sebastien Hertz1c80bec2015-02-03 11:58:06 +01005648 } else if (Runtime::Current()->IsTransactionAborted()) {
5649 // The exception thrown when the transaction aborted has been caught and cleared
5650 // so we need to throw it again now.
David Sehr709b0702016-10-13 09:12:37 -07005651 VLOG(compiler) << "Return from class initializer of "
5652 << mirror::Class::PrettyDescriptor(klass.Get())
Sebastien Hertzbd9cf9f2015-03-03 12:16:13 +01005653 << " without exception while transaction was aborted: re-throw it now.";
Mathieu Chartier23369542020-03-04 08:24:11 -08005654 runtime->ThrowTransactionAbortError(self);
Vladimir Marko2c64a832018-01-04 11:31:56 +00005655 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Sebastien Hertz1c80bec2015-02-03 11:58:06 +01005656 success = false;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005657 } else {
Mathieu Chartier23369542020-03-04 08:24:11 -08005658 if (stats_enabled) {
5659 RuntimeStats* global_stats = runtime->GetStats();
5660 RuntimeStats* thread_stats = self->GetStats();
5661 ++global_stats->class_init_count;
5662 ++thread_stats->class_init_count;
5663 global_stats->class_init_time_ns += (t1 - t0 - t_sub);
5664 thread_stats->class_init_time_ns += (t1 - t0 - t_sub);
5665 }
Ian Rogerse6bb3b22013-08-19 21:51:45 -07005666 // Set the class as initialized unless we failed to initialize static fields.
Vladimir Markobf121912019-06-04 13:49:05 +01005667 callback = MarkClassInitialized(self, klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005668 if (VLOG_IS_ON(class_linker)) {
Ian Rogers1ff3c982014-08-12 02:30:58 -07005669 std::string temp;
5670 LOG(INFO) << "Initialized class " << klass->GetDescriptor(&temp) << " from " <<
Mathieu Chartierf8322842014-05-16 10:59:25 -07005671 klass->GetLocation();
Brian Carlstromae826982011-11-09 01:33:42 -08005672 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005673 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005674 }
Vladimir Markobf121912019-06-04 13:49:05 +01005675 if (callback != nullptr) {
5676 callback->MakeVisible(self);
5677 }
Ian Rogersbdfb1a52012-01-12 14:05:22 -08005678 return success;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005679}
5680
Alex Lighteb7c1442015-08-31 13:17:42 -07005681// We recursively run down the tree of interfaces. We need to do this in the order they are declared
5682// and perform the initialization only on those interfaces that contain default methods.
5683bool ClassLinker::InitializeDefaultInterfaceRecursive(Thread* self,
5684 Handle<mirror::Class> iface,
5685 bool can_init_statics,
5686 bool can_init_parents) {
5687 CHECK(iface->IsInterface());
5688 size_t num_direct_ifaces = iface->NumDirectInterfaces();
Alex Light56a40f52015-10-14 11:07:41 -07005689 // Only create the (expensive) handle scope if we need it.
5690 if (UNLIKELY(num_direct_ifaces > 0)) {
5691 StackHandleScope<1> hs(self);
5692 MutableHandle<mirror::Class> handle_super_iface(hs.NewHandle<mirror::Class>(nullptr));
5693 // First we initialize all of iface's super-interfaces recursively.
5694 for (size_t i = 0; i < num_direct_ifaces; i++) {
Vladimir Marko19a4d372016-12-08 14:41:46 +00005695 ObjPtr<mirror::Class> super_iface = mirror::Class::GetDirectInterface(self, iface.Get(), i);
Vladimir Marko8d6768d2017-03-14 10:13:21 +00005696 CHECK(super_iface != nullptr) << iface->PrettyDescriptor() << " iface #" << i;
Alex Light56a40f52015-10-14 11:07:41 -07005697 if (!super_iface->HasBeenRecursivelyInitialized()) {
5698 // Recursive step
5699 handle_super_iface.Assign(super_iface);
5700 if (!InitializeDefaultInterfaceRecursive(self,
5701 handle_super_iface,
5702 can_init_statics,
5703 can_init_parents)) {
5704 return false;
5705 }
Alex Lighteb7c1442015-08-31 13:17:42 -07005706 }
5707 }
5708 }
5709
5710 bool result = true;
5711 // Then we initialize 'iface' if it has default methods. We do not need to (and in fact must not)
5712 // initialize it if it has no default methods.
5713 if (iface->HasDefaultMethods()) {
5714 result = EnsureInitialized(self, iface, can_init_statics, can_init_parents);
5715 }
5716
5717 // Mark that this interface has undergone recursive default interface initialization so we know we
5718 // can skip it on any later class initializations. We do this even if we are not a default
5719 // interface since we can still avoid the traversal. This is purely a performance optimization.
5720 if (result) {
5721 // TODO This should be done in a better way
Andreas Gampe976b2982018-03-02 17:54:22 -08005722 // Note: Use a try-lock to avoid blocking when someone else is holding the lock on this
5723 // interface. It is bad (Java) style, but not impossible. Marking the recursive
5724 // initialization is a performance optimization (to avoid another idempotent visit
5725 // for other implementing classes/interfaces), and can be revisited later.
5726 ObjectTryLock<mirror::Class> lock(self, iface);
5727 if (lock.Acquired()) {
5728 iface->SetRecursivelyInitialized();
5729 }
Alex Lighteb7c1442015-08-31 13:17:42 -07005730 }
5731 return result;
5732}
5733
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07005734bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass,
5735 Thread* self,
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005736 ObjectLock<mirror::Class>& lock)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005737 REQUIRES_SHARED(Locks::mutator_lock_) {
Brian Carlstromd1422f82011-09-28 11:37:09 -07005738 while (true) {
Ian Rogers00f7d0e2012-07-19 15:28:27 -07005739 self->AssertNoPendingException();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005740 CHECK(!klass->IsInitialized());
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005741 lock.WaitIgnoringInterrupts();
Brian Carlstromd1422f82011-09-28 11:37:09 -07005742
5743 // When we wake up, repeat the test for init-in-progress. If
5744 // there's an exception pending (only possible if
Brian Carlstromb23eab12014-10-08 17:55:21 -07005745 // we were not using WaitIgnoringInterrupts), bail out.
Brian Carlstromd1422f82011-09-28 11:37:09 -07005746 if (self->IsExceptionPending()) {
Brian Carlstromb23eab12014-10-08 17:55:21 -07005747 WrapExceptionInInitializer(klass);
Vladimir Marko2c64a832018-01-04 11:31:56 +00005748 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Brian Carlstromd1422f82011-09-28 11:37:09 -07005749 return false;
5750 }
5751 // Spurious wakeup? Go back to waiting.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005752 if (klass->GetStatus() == ClassStatus::kInitializing) {
Brian Carlstromd1422f82011-09-28 11:37:09 -07005753 continue;
5754 }
Vladimir Marko2c64a832018-01-04 11:31:56 +00005755 if (klass->GetStatus() == ClassStatus::kVerified &&
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08005756 Runtime::Current()->IsAotCompiler()) {
Ian Rogers3d1548d2012-09-24 14:08:03 -07005757 // Compile time initialization failed.
5758 return false;
5759 }
Brian Carlstromd1422f82011-09-28 11:37:09 -07005760 if (klass->IsErroneous()) {
5761 // The caller wants an exception, but it was thrown in a
5762 // different thread. Synthesize one here.
Brian Carlstromdf143242011-10-10 18:05:34 -07005763 ThrowNoClassDefFoundError("<clinit> failed for class %s; see exception in other thread",
David Sehr709b0702016-10-13 09:12:37 -07005764 klass->PrettyDescriptor().c_str());
Brian Carlstromb23eab12014-10-08 17:55:21 -07005765 VlogClassInitializationFailure(klass);
Brian Carlstromd1422f82011-09-28 11:37:09 -07005766 return false;
5767 }
5768 if (klass->IsInitialized()) {
5769 return true;
5770 }
David Sehr709b0702016-10-13 09:12:37 -07005771 LOG(FATAL) << "Unexpected class status. " << klass->PrettyClass() << " is "
Mathieu Chartierc528dba2013-11-26 12:00:11 -08005772 << klass->GetStatus();
Brian Carlstromd1422f82011-09-28 11:37:09 -07005773 }
Ian Rogers07140832014-09-30 15:43:59 -07005774 UNREACHABLE();
Brian Carlstromd1422f82011-09-28 11:37:09 -07005775}
5776
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005777static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,
5778 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005779 ArtMethod* method,
5780 ArtMethod* m)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005781 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005782 DCHECK(Thread::Current()->IsExceptionPending());
5783 DCHECK(!m->IsProxyMethod());
5784 const DexFile* dex_file = m->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005785 const dex::MethodId& method_id = dex_file->GetMethodId(m->GetDexMethodIndex());
5786 const dex::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
Andreas Gampea5b09a62016-11-17 15:21:22 -08005787 dex::TypeIndex return_type_idx = proto_id.return_type_idx_;
David Sehr709b0702016-10-13 09:12:37 -07005788 std::string return_type = dex_file->PrettyType(return_type_idx);
5789 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005790 ThrowWrappedLinkageError(klass.Get(),
5791 "While checking class %s method %s signature against %s %s: "
5792 "Failed to resolve return type %s with %s",
David Sehr709b0702016-10-13 09:12:37 -07005793 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5794 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005795 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005796 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005797 return_type.c_str(), class_loader.c_str());
5798}
5799
5800static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,
5801 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005802 ArtMethod* method,
5803 ArtMethod* m,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07005804 uint32_t index,
Andreas Gampea5b09a62016-11-17 15:21:22 -08005805 dex::TypeIndex arg_type_idx)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005806 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005807 DCHECK(Thread::Current()->IsExceptionPending());
5808 DCHECK(!m->IsProxyMethod());
5809 const DexFile* dex_file = m->GetDexFile();
David Sehr709b0702016-10-13 09:12:37 -07005810 std::string arg_type = dex_file->PrettyType(arg_type_idx);
5811 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005812 ThrowWrappedLinkageError(klass.Get(),
5813 "While checking class %s method %s signature against %s %s: "
5814 "Failed to resolve arg %u type %s with %s",
David Sehr709b0702016-10-13 09:12:37 -07005815 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5816 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005817 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005818 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005819 index, arg_type.c_str(), class_loader.c_str());
5820}
5821
5822static void ThrowSignatureMismatch(Handle<mirror::Class> klass,
5823 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005824 ArtMethod* method,
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005825 const std::string& error_msg)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005826 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005827 ThrowLinkageError(klass.Get(),
5828 "Class %s method %s resolves differently in %s %s: %s",
David Sehr709b0702016-10-13 09:12:37 -07005829 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5830 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005831 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005832 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005833 error_msg.c_str());
5834}
5835
Ian Rogersb5fb2072014-12-02 17:22:02 -08005836static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005837 Handle<mirror::Class> klass,
5838 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005839 ArtMethod* method1,
5840 ArtMethod* method2)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005841 REQUIRES_SHARED(Locks::mutator_lock_) {
Ian Rogersb5fb2072014-12-02 17:22:02 -08005842 {
5843 StackHandleScope<1> hs(self);
Vladimir Markob45528c2017-07-27 14:14:28 +01005844 Handle<mirror::Class> return_type(hs.NewHandle(method1->ResolveReturnType()));
Andreas Gampefa4333d2017-02-14 11:10:34 -08005845 if (UNLIKELY(return_type == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005846 ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005847 return false;
5848 }
Vladimir Markob45528c2017-07-27 14:14:28 +01005849 ObjPtr<mirror::Class> other_return_type = method2->ResolveReturnType();
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005850 if (UNLIKELY(other_return_type == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005851 ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005852 return false;
5853 }
Vladimir Marko862f43c2015-02-10 18:22:57 +00005854 if (UNLIKELY(other_return_type != return_type.Get())) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005855 ThrowSignatureMismatch(klass, super_klass, method1,
5856 StringPrintf("Return types mismatch: %s(%p) vs %s(%p)",
David Sehr709b0702016-10-13 09:12:37 -07005857 return_type->PrettyClassAndClassLoader().c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005858 return_type.Get(),
David Sehr709b0702016-10-13 09:12:37 -07005859 other_return_type->PrettyClassAndClassLoader().c_str(),
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005860 other_return_type.Ptr()));
Ian Rogersb5fb2072014-12-02 17:22:02 -08005861 return false;
5862 }
5863 }
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005864 const dex::TypeList* types1 = method1->GetParameterTypeList();
5865 const dex::TypeList* types2 = method2->GetParameterTypeList();
Ian Rogersb5fb2072014-12-02 17:22:02 -08005866 if (types1 == nullptr) {
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005867 if (types2 != nullptr && types2->Size() != 0) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005868 ThrowSignatureMismatch(klass, super_klass, method1,
5869 StringPrintf("Type list mismatch with %s",
David Sehr709b0702016-10-13 09:12:37 -07005870 method2->PrettyMethod(true).c_str()));
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005871 return false;
5872 }
5873 return true;
Ian Rogersb5fb2072014-12-02 17:22:02 -08005874 } else if (UNLIKELY(types2 == nullptr)) {
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005875 if (types1->Size() != 0) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005876 ThrowSignatureMismatch(klass, super_klass, method1,
5877 StringPrintf("Type list mismatch with %s",
David Sehr709b0702016-10-13 09:12:37 -07005878 method2->PrettyMethod(true).c_str()));
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005879 return false;
5880 }
5881 return true;
Ian Rogersb5fb2072014-12-02 17:22:02 -08005882 }
5883 uint32_t num_types = types1->Size();
5884 if (UNLIKELY(num_types != types2->Size())) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005885 ThrowSignatureMismatch(klass, super_klass, method1,
5886 StringPrintf("Type list mismatch with %s",
David Sehr709b0702016-10-13 09:12:37 -07005887 method2->PrettyMethod(true).c_str()));
Ian Rogersb5fb2072014-12-02 17:22:02 -08005888 return false;
5889 }
5890 for (uint32_t i = 0; i < num_types; ++i) {
Vladimir Marko862f43c2015-02-10 18:22:57 +00005891 StackHandleScope<1> hs(self);
Andreas Gampea5b09a62016-11-17 15:21:22 -08005892 dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
Vladimir Marko862f43c2015-02-10 18:22:57 +00005893 Handle<mirror::Class> param_type(hs.NewHandle(
Vladimir Markob45528c2017-07-27 14:14:28 +01005894 method1->ResolveClassFromTypeIndex(param_type_idx)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08005895 if (UNLIKELY(param_type == nullptr)) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005896 ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005897 method1, i, param_type_idx);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005898 return false;
5899 }
Andreas Gampea5b09a62016-11-17 15:21:22 -08005900 dex::TypeIndex other_param_type_idx = types2->GetTypeItem(i).type_idx_;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005901 ObjPtr<mirror::Class> other_param_type =
Vladimir Markob45528c2017-07-27 14:14:28 +01005902 method2->ResolveClassFromTypeIndex(other_param_type_idx);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005903 if (UNLIKELY(other_param_type == nullptr)) {
5904 ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005905 method2, i, other_param_type_idx);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005906 return false;
5907 }
Vladimir Marko862f43c2015-02-10 18:22:57 +00005908 if (UNLIKELY(param_type.Get() != other_param_type)) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005909 ThrowSignatureMismatch(klass, super_klass, method1,
5910 StringPrintf("Parameter %u type mismatch: %s(%p) vs %s(%p)",
5911 i,
David Sehr709b0702016-10-13 09:12:37 -07005912 param_type->PrettyClassAndClassLoader().c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005913 param_type.Get(),
David Sehr709b0702016-10-13 09:12:37 -07005914 other_param_type->PrettyClassAndClassLoader().c_str(),
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005915 other_param_type.Ptr()));
Ian Rogersb5fb2072014-12-02 17:22:02 -08005916 return false;
5917 }
5918 }
5919 return true;
5920}
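// Illustrative sketch (assumed scenario): if a class loaded by loader L1 overrides
//   Foo bar()
// declared by a superclass loaded by loader L2, and the descriptor "LFoo;" resolves to
// different classes in L1 and L2, the checks above throw a LinkageError instead of letting
// the override silently break the superclass contract.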
5921
5922
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07005923bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) {
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005924 if (klass->IsInterface()) {
5925 return true;
5926 }
Ian Rogers151f2212014-05-06 11:27:27 -07005927 // Begin with the methods local to the superclass.
Ian Rogersded66a02014-10-28 18:12:55 -07005928 Thread* self = Thread::Current();
Mathieu Chartiere401d142015-04-22 13:56:20 -07005929 StackHandleScope<1> hs(self);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005930 MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(nullptr));
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005931 if (klass->HasSuperClass() &&
5932 klass->GetClassLoader() != klass->GetSuperClass()->GetClassLoader()) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005933 super_klass.Assign(klass->GetSuperClass());
Mingyao Yang2cdbad72014-07-16 10:44:41 -07005934 for (int i = klass->GetSuperClass()->GetVTableLength() - 1; i >= 0; --i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005935 auto* m = klass->GetVTableEntry(i, image_pointer_size_);
5936 auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_);
5937 if (m != super_m) {
Vladimir Marko942fd312017-01-16 20:52:19 +00005938 if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
5939 klass,
5940 super_klass,
5941 m,
5942 super_m))) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005943 self->AssertPendingException();
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005944 return false;
5945 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005946 }
5947 }
5948 }
Brian Carlstrom4b620ff2011-09-11 01:11:01 -07005949 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005950 super_klass.Assign(klass->GetIfTable()->GetInterface(i));
5951 if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
5952 uint32_t num_methods = super_klass->NumVirtualMethods();
Ian Rogers151f2212014-05-06 11:27:27 -07005953 for (uint32_t j = 0; j < num_methods; ++j) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005954 auto* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
5955 j, image_pointer_size_);
5956 auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_);
5957 if (m != super_m) {
Vladimir Marko942fd312017-01-16 20:52:19 +00005958 if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
5959 klass,
5960 super_klass,
5961 m,
5962 super_m))) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005963 self->AssertPendingException();
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005964 return false;
5965 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005966 }
5967 }
5968 }
5969 }
5970 return true;
5971}
5972
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005973bool ClassLinker::EnsureInitialized(Thread* self,
5974 Handle<mirror::Class> c,
5975 bool can_init_fields,
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07005976 bool can_init_parents) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08005977 DCHECK(c != nullptr);
Igor Murashkin86083f72017-10-27 10:59:04 -07005978
Mathieu Chartier524507a2014-08-27 15:28:28 -07005979 if (c->IsInitialized()) {
Vladimir Marko8e110652019-07-30 10:14:41 +01005980 // If we've seen an initialized but not visibly initialized class
5981 // many times, request visible initialization.
5982 if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
5983 // Thanks to the x86 memory model, classes skip the initialized status.
5984 DCHECK(c->IsVisiblyInitialized());
5985 } else if (UNLIKELY(!c->IsVisiblyInitialized())) {
5986 if (self->IncrementMakeVisiblyInitializedCounter()) {
5987 MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
5988 }
5989 }
Andreas Gampe5b20b352018-10-11 19:03:20 -07005990 DCHECK(c->WasVerificationAttempted()) << c->PrettyClassAndClassLoader();
Mathieu Chartier524507a2014-08-27 15:28:28 -07005991 return true;
5992 }
Igor Murashkin86083f72017-10-27 10:59:04 -07005993 // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
5994 //
5995 // Ensure the bitstring is initialized before any of the class initialization
5996 // logic occurs. Once a class initializer starts running, objects can
5997 // escape into the heap and use the subtype checking code.
5998 //
5999 // Note: A class whose SubtypeCheckInfo is at least Initialized means it
6000 // can be used as a source for the IsSubClass check, and that all ancestors
6001 // of the class are Assigned (can be used as a target for IsSubClass check)
6002 // or Overflowed (can be used as a source for IsSubClass check).
Vladimir Marko305c38b2018-02-14 11:50:07 +00006003 if (kBitstringSubtypeCheckEnabled) {
Igor Murashkin86083f72017-10-27 10:59:04 -07006004 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
Vladimir Marko38b8b252018-01-02 19:07:06 +00006005 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(c.Get());
Igor Murashkin86083f72017-10-27 10:59:04 -07006006 // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck is already initialized.
6007 }
Ian Rogers7b078e82014-09-10 14:44:24 -07006008 const bool success = InitializeClass(self, c, can_init_fields, can_init_parents);
Mathieu Chartier524507a2014-08-27 15:28:28 -07006009 if (!success) {
6010 if (can_init_fields && can_init_parents) {
David Sehr709b0702016-10-13 09:12:37 -07006011 CHECK(self->IsExceptionPending()) << c->PrettyClass();
Mathieu Chartier524507a2014-08-27 15:28:28 -07006012 }
6013 } else {
6014 self->AssertNoPendingException();
Ian Rogers595799e2012-01-11 17:32:51 -08006015 }
6016 return success;
Elliott Hughesf4c21c92011-08-19 17:31:31 -07006017}
6018
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006019void ClassLinker::FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,
6020 ObjPtr<mirror::Class> new_class) {
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07006021 DCHECK_EQ(temp_class->NumInstanceFields(), 0u);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07006022 for (ArtField& field : new_class->GetIFields()) {
6023 if (field.GetDeclaringClass() == temp_class) {
6024 field.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006025 }
6026 }
6027
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07006028 DCHECK_EQ(temp_class->NumStaticFields(), 0u);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07006029 for (ArtField& field : new_class->GetSFields()) {
6030 if (field.GetDeclaringClass() == temp_class) {
6031 field.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006032 }
6033 }
6034
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07006035 DCHECK_EQ(temp_class->NumDirectMethods(), 0u);
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07006036 DCHECK_EQ(temp_class->NumVirtualMethods(), 0u);
Alex Lighte64300b2015-12-15 15:02:47 -08006037 for (auto& method : new_class->GetMethods(image_pointer_size_)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006038 if (method.GetDeclaringClass() == temp_class) {
6039 method.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006040 }
6041 }
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07006042
6043 // Make sure the remembered set and mod-union tables know that we updated some of the native
6044 // roots.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07006045 WriteBarrier::ForEveryFieldWrite(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006046}
6047
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006048void ClassLinker::RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08006049 CHECK(class_loader->GetAllocator() == nullptr);
6050 CHECK(class_loader->GetClassTable() == nullptr);
6051 Thread* const self = Thread::Current();
6052 ClassLoaderData data;
Ian Rogers55256cb2017-12-21 17:07:11 -08006053 data.weak_root = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, class_loader);
Mathieu Chartier5b830502016-03-02 10:30:23 -08006054 // Create and set the class table.
6055 data.class_table = new ClassTable;
6056 class_loader->SetClassTable(data.class_table);
6057 // Create and set the linear allocator.
6058 data.allocator = Runtime::Current()->CreateLinearAlloc();
6059 class_loader->SetAllocator(data.allocator);
6060 // Add to the list so that we know to free the data later.
6061 class_loaders_.push_back(data);
6062}
6063
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006064ClassTable* ClassLinker::InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier6b069532015-08-05 15:08:12 -07006065 if (class_loader == nullptr) {
Andreas Gampe2af99022017-04-25 08:32:59 -07006066 return boot_class_table_.get();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07006067 }
Mathieu Chartier6b069532015-08-05 15:08:12 -07006068 ClassTable* class_table = class_loader->GetClassTable();
6069 if (class_table == nullptr) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08006070 RegisterClassLoader(class_loader);
6071 class_table = class_loader->GetClassTable();
6072 DCHECK(class_table != nullptr);
Mathieu Chartier6b069532015-08-05 15:08:12 -07006073 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07006074 return class_table;
6075}
6076
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006077ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Andreas Gampe2af99022017-04-25 08:32:59 -07006078 return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07006079}
6080
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006081static ImTable* FindSuperImt(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006082 REQUIRES_SHARED(Locks::mutator_lock_) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006083 while (klass->HasSuperClass()) {
6084 klass = klass->GetSuperClass();
6085 if (klass->ShouldHaveImt()) {
6086 return klass->GetImt(pointer_size);
6087 }
6088 }
6089 return nullptr;
6090}
6091
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006092bool ClassLinker::LinkClass(Thread* self,
6093 const char* descriptor,
6094 Handle<mirror::Class> klass,
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07006095 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
Igor Murashkinb1d8c312015-08-04 11:18:43 -07006096 MutableHandle<mirror::Class>* h_new_class_out) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00006097 CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006098
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006099 if (!LinkSuperClass(klass)) {
6100 return false;
6101 }
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006102 ArtMethod* imt_data[ImTable::kSize];
6103 // Whether there are any new conflicts compared to the super class.
6104 bool new_conflict = false;
Nicolas Geoffray918dcea2017-07-21 07:58:14 +00006105 std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006106 if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) {
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006107 return false;
6108 }
Ian Rogers7b078e82014-09-10 14:44:24 -07006109 if (!LinkInstanceFields(self, klass)) {
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006110 return false;
6111 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006112 size_t class_size;
Igor Murashkinb1d8c312015-08-04 11:18:43 -07006113 if (!LinkStaticFields(self, klass, &class_size)) {
Brian Carlstrom4873d462011-08-21 15:23:39 -07006114 return false;
6115 }
6116 CreateReferenceInstanceOffsets(klass);
Vladimir Marko2c64a832018-01-04 11:31:56 +00006117 CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006118
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006119 ImTable* imt = nullptr;
6120 if (klass->ShouldHaveImt()) {
6121 // If there are any new conflicts compared to the super class, we cannot make a copy. There
6122 // can be cases where both will have a conflict method at the same slot without having the same
6123 // set of conflicts. In this case, we cannot share the IMT since the conflict table slow path
6124 // will possibly create a table that is incorrect for either of the classes.
6125 // Same IMT with new_conflict does not happen very often.
6126 if (!new_conflict) {
6127 ImTable* super_imt = FindSuperImt(klass.Get(), image_pointer_size_);
6128 if (super_imt != nullptr) {
6129 bool imt_equals = true;
6130 for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
6131 imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]);
6132 }
6133 if (imt_equals) {
6134 imt = super_imt;
6135 }
6136 }
6137 }
6138 if (imt == nullptr) {
6139 LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
6140 imt = reinterpret_cast<ImTable*>(
6141 allocator->Alloc(self, ImTable::SizeInBytes(image_pointer_size_)));
6142 if (imt == nullptr) {
6143 return false;
6144 }
6145 imt->Populate(imt_data, image_pointer_size_);
6146 }
6147 }
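// Illustrative note (assumption about the common case): when a subclass introduces no new
// IMT conflicts and every computed slot matches its superclass's table, the code above
// reuses the superclass ImTable instead of allocating a duplicate.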
6148
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006149 if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
6150 // We don't need to retire this class as it has no embedded tables or it was created with
6151 // the correct size during class linker initialization.
David Sehr709b0702016-10-13 09:12:37 -07006152 CHECK_EQ(klass->GetClassSize(), class_size) << klass->PrettyDescriptor();
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006153
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006154 if (klass->ShouldHaveEmbeddedVTable()) {
6155 klass->PopulateEmbeddedVTable(image_pointer_size_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006156 }
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006157 if (klass->ShouldHaveImt()) {
6158 klass->SetImt(imt, image_pointer_size_);
6159 }
Mingyao Yang063fc772016-08-02 11:02:54 -07006160
6161 // Update CHA info based on whether we override methods.
6162 // Have to do this before setting the class as resolved which allows
6163 // instantiation of klass.
Nicolas Geoffray4ac0e152019-09-18 06:14:50 +00006164 if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
Andreas Gampec1ac9ee2017-07-24 22:35:49 -07006165 cha_->UpdateAfterLoadingOf(klass);
6166 }
Nicolas Geoffray918dcea2017-07-21 07:58:14 +00006167
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006168 // This will notify waiters on klass that saw the not yet resolved
6169 // class in the class_table_ during EnsureResolved.
Vladimir Marko2c64a832018-01-04 11:31:56 +00006170 mirror::Class::SetStatus(klass, ClassStatus::kResolved, self);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07006171 h_new_class_out->Assign(klass.Get());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006172 } else {
6173 CHECK(!klass->IsResolved());
6174 // Retire the temporary class and create the correctly sized resolved class.
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07006175 StackHandleScope<1> hs(self);
Vladimir Marko3068d582019-05-28 16:39:29 +01006176 Handle<mirror::Class> h_new_class =
6177 hs.NewHandle(mirror::Class::CopyOf(klass, self, class_size, imt, image_pointer_size_));
Mathieu Chartier3ee25bb2015-08-10 10:13:02 -07006178 // Set arrays to null since we don't want to have multiple classes with the same ArtField or
6179 // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
6180 // may not see any references to the target space and clean the card for a class if another
6181 // class had the same array pointer.
Alex Lighte64300b2015-12-15 15:02:47 -08006182 klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07006183 klass->SetSFieldsPtrUnchecked(nullptr);
6184 klass->SetIFieldsPtrUnchecked(nullptr);
Andreas Gampefa4333d2017-02-14 11:10:34 -08006185 if (UNLIKELY(h_new_class == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006186 self->AssertPendingOOMException();
Vladimir Marko2c64a832018-01-04 11:31:56 +00006187 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006188 return false;
6189 }
6190
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07006191 CHECK_EQ(h_new_class->GetClassSize(), class_size);
6192 ObjectLock<mirror::Class> lock(self, h_new_class);
6193 FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07006194
Nicolas Geoffray4ac0e152019-09-18 06:14:50 +00006195 if (LIKELY(descriptor != nullptr)) {
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07006196 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Vladimir Marko0984e482019-03-27 16:41:41 +00006197 const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07006198 ClassTable* const table = InsertClassTableForClassLoader(class_loader);
Vladimir Marko0984e482019-03-27 16:41:41 +00006199 const ObjPtr<mirror::Class> existing =
6200 table->UpdateClass(descriptor, h_new_class.Get(), ComputeModifiedUtf8Hash(descriptor));
Mathieu Chartier05aa4d32015-09-19 12:44:38 -07006201 if (class_loader != nullptr) {
6202 // We updated the class in the class table, perform the write barrier so that the GC knows
6203 // about the change.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07006204 WriteBarrier::ForEveryFieldWrite(class_loader);
Mathieu Chartier05aa4d32015-09-19 12:44:38 -07006205 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07006206 CHECK_EQ(existing, klass.Get());
Vladimir Marko1998cd02017-01-13 13:02:58 +00006207 if (log_new_roots_) {
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07006208 new_class_roots_.push_back(GcRoot<mirror::Class>(h_new_class.Get()));
6209 }
6210 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006211
Mingyao Yang063fc772016-08-02 11:02:54 -07006212 // Update CHA info based on whether we override methods.
6213 // Have to do this before setting the class as resolved, which allows
6214 // instantiation of klass.
Nicolas Geoffray4ac0e152019-09-18 06:14:50 +00006215 if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
Andreas Gampec1ac9ee2017-07-24 22:35:49 -07006216 cha_->UpdateAfterLoadingOf(h_new_class);
6217 }
Mingyao Yang063fc772016-08-02 11:02:54 -07006218
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006219 // This will notify waiters on temp class that saw the not yet resolved class in the
6220 // class_table_ during EnsureResolved.
Vladimir Marko2c64a832018-01-04 11:31:56 +00006221 mirror::Class::SetStatus(klass, ClassStatus::kRetired, self);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006222
Vladimir Marko2c64a832018-01-04 11:31:56 +00006223 CHECK_EQ(h_new_class->GetStatus(), ClassStatus::kResolving);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006224 // This will notify waiters on new_class that saw the not yet resolved
6225 // class in the class_table_ during EnsureResolved.
Vladimir Marko2c64a832018-01-04 11:31:56 +00006226 mirror::Class::SetStatus(h_new_class, ClassStatus::kResolved, self);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07006227 // Return the new class.
6228 h_new_class_out->Assign(h_new_class.Get());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006229 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006230 return true;
6231}
6232
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07006233bool ClassLinker::LoadSuperAndInterfaces(Handle<mirror::Class> klass, const DexFile& dex_file) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00006234 CHECK_EQ(ClassStatus::kIdx, klass->GetStatus());
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006235 const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
Andreas Gampea5b09a62016-11-17 15:21:22 -08006236 dex::TypeIndex super_class_idx = class_def.superclass_idx_;
6237 if (super_class_idx.IsValid()) {
Roland Levillain90328ac2016-05-18 12:25:38 +01006238 // Check that a class does not inherit from itself directly.
6239 //
6240 // TODO: This is a cheap check to detect the straightforward case
6241 // of a class extending itself (b/28685551), but we should do a
6242 // proper cycle detection on loaded classes, to detect all cases
6243 // of class circularity errors (b/28830038).
6244 if (super_class_idx == class_def.class_idx_) {
6245 ThrowClassCircularityError(klass.Get(),
6246 "Class %s extends itself",
David Sehr709b0702016-10-13 09:12:37 -07006247 klass->PrettyDescriptor().c_str());
Roland Levillain90328ac2016-05-18 12:25:38 +01006248 return false;
6249 }
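// Illustrative sketch (assumed example): a hand-crafted dex file in which class Foo lists
// Foo's own type index as superclass_idx_ is rejected by the check above with
// ClassCircularityError before we even attempt to resolve the superclass.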
6250
Vladimir Marko666ee3d2017-12-11 18:37:36 +00006251 ObjPtr<mirror::Class> super_class = ResolveType(super_class_idx, klass.Get());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006252 if (super_class == nullptr) {
Brian Carlstrom65ca0772011-09-24 16:03:08 -07006253 DCHECK(Thread::Current()->IsExceptionPending());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006254 return false;
6255 }
Ian Rogersbe125a92012-01-11 15:19:49 -08006256 // Verify
6257 if (!klass->CanAccess(super_class)) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006258 ThrowIllegalAccessError(klass.Get(), "Class %s extended by class %s is inaccessible",
David Sehr709b0702016-10-13 09:12:37 -07006259 super_class->PrettyDescriptor().c_str(),
6260 klass->PrettyDescriptor().c_str());
Ian Rogersbe125a92012-01-11 15:19:49 -08006261 return false;
6262 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006263 CHECK(super_class->IsResolved());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006264 klass->SetSuperClass(super_class);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006265 }
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006266 const dex::TypeList* interfaces = dex_file.GetInterfacesList(class_def);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006267 if (interfaces != nullptr) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08006268 for (size_t i = 0; i < interfaces->Size(); i++) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08006269 dex::TypeIndex idx = interfaces->GetTypeItem(i).type_idx_;
Vladimir Marko666ee3d2017-12-11 18:37:36 +00006270 ObjPtr<mirror::Class> interface = ResolveType(idx, klass.Get());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006271 if (interface == nullptr) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08006272 DCHECK(Thread::Current()->IsExceptionPending());
6273 return false;
6274 }
6275 // Verify
6276 if (!klass->CanAccess(interface)) {
6277 // TODO: the RI seemed to ignore this in my testing.
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006278 ThrowIllegalAccessError(klass.Get(),
6279 "Interface %s implemented by class %s is inaccessible",
David Sehr709b0702016-10-13 09:12:37 -07006280 interface->PrettyDescriptor().c_str(),
6281 klass->PrettyDescriptor().c_str());
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08006282 return false;
6283 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006284 }
6285 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07006286 // Mark the class as loaded.
Vladimir Marko2c64a832018-01-04 11:31:56 +00006287 mirror::Class::SetStatus(klass, ClassStatus::kLoaded, nullptr);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006288 return true;
6289}
6290
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07006291bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006292 CHECK(!klass->IsPrimitive());
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006293 ObjPtr<mirror::Class> super = klass->GetSuperClass();
Vladimir Markob4eb1b12018-05-24 11:09:38 +01006294 ObjPtr<mirror::Class> object_class = GetClassRoot<mirror::Object>(this);
6295 if (klass.Get() == object_class) {
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006296 if (super != nullptr) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006297 ThrowClassFormatError(klass.Get(), "java.lang.Object must not have a superclass");
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006298 return false;
6299 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006300 return true;
6301 }
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006302 if (super == nullptr) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006303 ThrowLinkageError(klass.Get(), "No superclass defined for class %s",
David Sehr709b0702016-10-13 09:12:37 -07006304 klass->PrettyDescriptor().c_str());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006305 return false;
6306 }
6307 // Verify
Vladimir Markob4eb1b12018-05-24 11:09:38 +01006308 if (klass->IsInterface() && super != object_class) {
Vladimir Marko1fcae9f2017-11-28 14:14:19 +00006309 ThrowClassFormatError(klass.Get(), "Interfaces must have java.lang.Object as superclass");
6310 return false;
6311 }
Vladimir Markob43b2d82017-07-18 17:46:38 +01006312 if (super->IsFinal()) {
6313 ThrowVerifyError(klass.Get(),
6314 "Superclass %s of %s is declared final",
6315 super->PrettyDescriptor().c_str(),
6316 klass->PrettyDescriptor().c_str());
6317 return false;
6318 }
6319 if (super->IsInterface()) {
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006320 ThrowIncompatibleClassChangeError(klass.Get(),
Vladimir Markob43b2d82017-07-18 17:46:38 +01006321 "Superclass %s of %s is an interface",
David Sehr709b0702016-10-13 09:12:37 -07006322 super->PrettyDescriptor().c_str(),
Vladimir Markob43b2d82017-07-18 17:46:38 +01006323 klass->PrettyDescriptor().c_str());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006324 return false;
6325 }
6326 if (!klass->CanAccess(super)) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006327 ThrowIllegalAccessError(klass.Get(), "Superclass %s is inaccessible to class %s",
David Sehr709b0702016-10-13 09:12:37 -07006328 super->PrettyDescriptor().c_str(),
6329 klass->PrettyDescriptor().c_str());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006330 return false;
6331 }
Elliott Hughes20cde902011-10-04 17:37:27 -07006332
Brian Carlstromf3632832014-05-20 15:36:53 -07006333 // Inherit kAccClassIsFinalizable from the superclass in case this
6334 // class doesn't override finalize.
Elliott Hughes20cde902011-10-04 17:37:27 -07006335 if (super->IsFinalizable()) {
6336 klass->SetFinalizable();
6337 }
6338
Mathieu Chartiere4275c02015-08-06 15:34:15 -07006339  // Inherit class loader flag from super class.
6340 if (super->IsClassLoaderClass()) {
6341 klass->SetClassLoaderClass();
6342 }
6343
Elliott Hughes2da50362011-10-10 16:57:08 -07006344 // Inherit reference flags (if any) from the superclass.
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07006345 uint32_t reference_flags = (super->GetClassFlags() & mirror::kClassFlagReference);
Elliott Hughes2da50362011-10-10 16:57:08 -07006346 if (reference_flags != 0) {
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07006347 CHECK_EQ(klass->GetClassFlags(), 0u);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07006348 klass->SetClassFlags(klass->GetClassFlags() | reference_flags);
Elliott Hughes2da50362011-10-10 16:57:08 -07006349 }
Elliott Hughes72ee0ae2011-10-10 17:31:28 -07006350 // Disallow custom direct subclasses of java.lang.ref.Reference.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01006351 if (init_done_ && super == GetClassRoot<mirror::Reference>(this)) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006352 ThrowLinkageError(klass.Get(),
Ian Rogers62d6c772013-02-27 08:32:07 -08006353 "Class %s attempts to subclass java.lang.ref.Reference, which is not allowed",
David Sehr709b0702016-10-13 09:12:37 -07006354 klass->PrettyDescriptor().c_str());
Elliott Hughes72ee0ae2011-10-10 17:31:28 -07006355 return false;
6356 }
Elliott Hughes2da50362011-10-10 16:57:08 -07006357
Ian Rogers7dfb28c2013-08-22 08:18:36 -07006358 if (kIsDebugBuild) {
6359    // Ensure super classes are fully resolved prior to resolving fields.
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006360 while (super != nullptr) {
Ian Rogers7dfb28c2013-08-22 08:18:36 -07006361 CHECK(super->IsResolved());
6362 super = super->GetSuperClass();
6363 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006364 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006365 return true;
6366}
6367
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006368// Populate the class vtable and itable. Compute return type indices.
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006369bool ClassLinker::LinkMethods(Thread* self,
6370 Handle<mirror::Class> klass,
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006371 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006372 bool* out_new_conflict,
Igor Murashkinb1d8c312015-08-04 11:18:43 -07006373 ArtMethod** out_imt) {
Ian Rogers7b078e82014-09-10 14:44:24 -07006374 self->AllowThreadSuspension();
Alex Lighteb7c1442015-08-31 13:17:42 -07006375 // A map from vtable indexes to the method they need to be updated to point to. Used because we
6376 // need to have default methods be in the virtuals array of each class but we don't set that up
6377 // until LinkInterfaceMethods.
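  // For example, a hypothetical entry {3, MethodTranslation::CreateTranslatedMethod(m)} records
  // that vtable slot 3 must be repointed to default method m once m has been copied into this
  // class's methods by LinkInterfaceMethods.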
Alex Light9139e002015-10-09 15:59:48 -07006378 std::unordered_map<size_t, ClassLinker::MethodTranslation> default_translations;
Alex Lighteb7c1442015-08-31 13:17:42 -07006379 // Link virtual methods then interface methods.
6380 // We set up the interface lookup table first because we need it to determine if we need to update
6381 // any vtable entries with new default method implementations.
6382 return SetupInterfaceLookupTable(self, klass, interfaces)
6383 && LinkVirtualMethods(self, klass, /*out*/ &default_translations)
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006384 && LinkInterfaceMethods(self, klass, default_translations, out_new_conflict, out_imt);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006385}
6386
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006387// Comparator for name and signature of a method, used in finding overriding methods. Implementation
6388 // avoids the use of handles; if it didn't, then rather than comparing dex files we could compare dex
6389// caches in the implementation below.
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01006390class MethodNameAndSignatureComparator final : public ValueObject {
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006391 public:
Mathieu Chartiere401d142015-04-22 13:56:20 -07006392 explicit MethodNameAndSignatureComparator(ArtMethod* method)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006393 REQUIRES_SHARED(Locks::mutator_lock_) :
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006394 dex_file_(method->GetDexFile()), mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())),
6395 name_(nullptr), name_len_(0) {
David Sehr709b0702016-10-13 09:12:37 -07006396 DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006397 }
6398
6399 const char* GetName() {
6400 if (name_ == nullptr) {
6401 name_ = dex_file_->StringDataAndUtf16LengthByIdx(mid_->name_idx_, &name_len_);
6402 }
6403 return name_;
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006404 }
6405
Mathieu Chartiere401d142015-04-22 13:56:20 -07006406 bool HasSameNameAndSignature(ArtMethod* other)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006407 REQUIRES_SHARED(Locks::mutator_lock_) {
David Sehr709b0702016-10-13 09:12:37 -07006408 DCHECK(!other->IsProxyMethod()) << other->PrettyMethod();
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006409 const DexFile* other_dex_file = other->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006410 const dex::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex());
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006411 if (dex_file_ == other_dex_file) {
6412 return mid_->name_idx_ == other_mid.name_idx_ && mid_->proto_idx_ == other_mid.proto_idx_;
6413 }
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006414    GetName();  // Only used to make sure it's calculated.
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006415 uint32_t other_name_len;
6416 const char* other_name = other_dex_file->StringDataAndUtf16LengthByIdx(other_mid.name_idx_,
6417 &other_name_len);
6418 if (name_len_ != other_name_len || strcmp(name_, other_name) != 0) {
6419 return false;
6420 }
6421 return dex_file_->GetMethodSignature(*mid_) == other_dex_file->GetMethodSignature(other_mid);
6422 }
6423
6424 private:
6425 // Dex file for the method to compare against.
6426 const DexFile* const dex_file_;
6427 // MethodId for the method to compare against.
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006428 const dex::MethodId* const mid_;
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006429 // Lazily computed name from the dex file's strings.
6430 const char* name_;
6431 // Lazily computed name length.
6432 uint32_t name_len_;
6433};
6434
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006435class LinkVirtualHashTable {
6436 public:
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006437 LinkVirtualHashTable(Handle<mirror::Class> klass,
6438 size_t hash_size,
6439 uint32_t* hash_table,
Andreas Gampe542451c2016-07-26 09:02:02 -07006440 PointerSize image_pointer_size)
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006441 : klass_(klass),
6442 hash_size_(hash_size),
6443 hash_table_(hash_table),
Mathieu Chartiere401d142015-04-22 13:56:20 -07006444 image_pointer_size_(image_pointer_size) {
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006445 std::fill(hash_table_, hash_table_ + hash_size_, invalid_index_);
6446 }
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006447
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006448 void Add(uint32_t virtual_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006449 ArtMethod* local_method = klass_->GetVirtualMethodDuringLinking(
6450 virtual_method_index, image_pointer_size_);
6451 const char* name = local_method->GetInterfaceMethodIfProxy(image_pointer_size_)->GetName();
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08006452 uint32_t hash = ComputeModifiedUtf8Hash(name);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006453 uint32_t index = hash % hash_size_;
6454 // Linear probe until we have an empty slot.
6455 while (hash_table_[index] != invalid_index_) {
6456 if (++index == hash_size_) {
6457 index = 0;
6458 }
6459 }
6460 hash_table_[index] = virtual_method_index;
6461 }
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006462
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006463 uint32_t FindAndRemove(MethodNameAndSignatureComparator* comparator)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006464 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006465 const char* name = comparator->GetName();
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08006466 uint32_t hash = ComputeModifiedUtf8Hash(name);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006467 size_t index = hash % hash_size_;
6468 while (true) {
6469 const uint32_t value = hash_table_[index];
6470      // Since linear probing creates contiguous blocks, hitting an invalid index means we are done
6471      // with the block and can safely assume the method was not found.
6472 if (value == invalid_index_) {
6473 break;
6474 }
6475      if (value != removed_index_) {  // This signifies not already overridden.
Mathieu Chartiere401d142015-04-22 13:56:20 -07006476 ArtMethod* virtual_method =
6477 klass_->GetVirtualMethodDuringLinking(value, image_pointer_size_);
6478 if (comparator->HasSameNameAndSignature(
6479 virtual_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006480 hash_table_[index] = removed_index_;
6481 return value;
6482 }
6483 }
6484 if (++index == hash_size_) {
6485 index = 0;
6486 }
6487 }
6488 return GetNotFoundIndex();
6489 }
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006490
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006491 static uint32_t GetNotFoundIndex() {
6492 return invalid_index_;
6493 }
6494
6495 private:
6496 static const uint32_t invalid_index_;
6497 static const uint32_t removed_index_;
6498
6499 Handle<mirror::Class> klass_;
6500 const size_t hash_size_;
6501 uint32_t* const hash_table_;
Andreas Gampe542451c2016-07-26 09:02:02 -07006502 const PointerSize image_pointer_size_;
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006503};
6504
6505const uint32_t LinkVirtualHashTable::invalid_index_ = std::numeric_limits<uint32_t>::max();
6506const uint32_t LinkVirtualHashTable::removed_index_ = std::numeric_limits<uint32_t>::max() - 1;
6507
Stephen Hines1ddd9132017-02-08 01:51:18 -08006508bool ClassLinker::LinkVirtualMethods(
Alex Lighteb7c1442015-08-31 13:17:42 -07006509 Thread* self,
6510 Handle<mirror::Class> klass,
Alex Light9139e002015-10-09 15:59:48 -07006511 /*out*/std::unordered_map<size_t, ClassLinker::MethodTranslation>* default_translations) {
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006512 const size_t num_virtual_methods = klass->NumVirtualMethods();
Alex Lighteb7c1442015-08-31 13:17:42 -07006513 if (klass->IsInterface()) {
6514 // No vtable.
6515 if (!IsUint<16>(num_virtual_methods)) {
6516 ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
6517 return false;
6518 }
6519 bool has_defaults = false;
Alex Lighteb7c1442015-08-31 13:17:42 -07006520 // Assign each method an IMT index and set the default flag.
6521 for (size_t i = 0; i < num_virtual_methods; ++i) {
6522 ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
6523 m->SetMethodIndex(i);
6524 if (!m->IsAbstract()) {
Vladimir Marko1c993cd2020-05-28 09:30:06 +00006525 // If the dex file does not support default methods, throw ClassFormatError.
6526 // This check is necessary to protect from odd cases, such as native default
6527 // methods, that the dex file verifier permits for old dex file versions. b/157170505
6528 // FIXME: This should be `if (!m->GetDexFile()->SupportsDefaultMethods())` but we're
6529 // currently running CTS tests for default methods with dex file version 035 which
6530 // does not support default methods. So, we limit this to native methods. b/157718952
6531 if (m->IsNative()) {
6532 DCHECK(!m->GetDexFile()->SupportsDefaultMethods());
6533 ThrowClassFormatError(klass.Get(),
6534 "Dex file does not support default method '%s'",
6535 m->PrettyMethod().c_str());
6536 return false;
6537 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006538 m->SetAccessFlags(m->GetAccessFlags() | kAccDefault);
6539 has_defaults = true;
6540 }
6541 }
6542 // Mark that we have default methods so that we won't need to scan the virtual_methods_ array
6543 // during initialization. This is a performance optimization. We could simply traverse the
6544 // virtual_methods_ array again during initialization.
6545 if (has_defaults) {
6546 klass->SetHasDefaultMethods();
6547 }
6548 return true;
6549 } else if (klass->HasSuperClass()) {
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006550 const size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
6551 const size_t max_count = num_virtual_methods + super_vtable_length;
Vladimir Marko3068d582019-05-28 16:39:29 +01006552 StackHandleScope<3> hs(self);
Mingyao Yang38eecb02014-08-13 14:51:03 -07006553 Handle<mirror::Class> super_class(hs.NewHandle(klass->GetSuperClass()));
Mathieu Chartiere401d142015-04-22 13:56:20 -07006554 MutableHandle<mirror::PointerArray> vtable;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006555 if (super_class->ShouldHaveEmbeddedVTable()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006556 vtable = hs.NewHandle(AllocPointerArray(self, max_count));
Andreas Gampefa4333d2017-02-14 11:10:34 -08006557 if (UNLIKELY(vtable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006558 self->AssertPendingOOMException();
Mingyao Yang2cdbad72014-07-16 10:44:41 -07006559 return false;
6560 }
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006561 for (size_t i = 0; i < super_vtable_length; i++) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006562 vtable->SetElementPtrSize(
6563 i, super_class->GetEmbeddedVTableEntry(i, image_pointer_size_), image_pointer_size_);
Mingyao Yang2cdbad72014-07-16 10:44:41 -07006564 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006565 // We might need to change vtable if we have new virtual methods or new interfaces (since that
6566      // might give us new default methods). If there are no new virtual methods and no new interfaces
6567      // then we can skip the rest since the class cannot override any of the super-class's methods. This is required for
6568 // correctness since without it we might not update overridden default method vtable entries
6569 // correctly.
6570 if (num_virtual_methods == 0 && super_class->GetIfTableCount() == klass->GetIfTableCount()) {
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006571 klass->SetVTable(vtable.Get());
6572 return true;
6573 }
Mingyao Yang2cdbad72014-07-16 10:44:41 -07006574 } else {
Alex Lighteb7c1442015-08-31 13:17:42 -07006575 DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
Vladimir Marko3068d582019-05-28 16:39:29 +01006576 Handle<mirror::PointerArray> super_vtable = hs.NewHandle(super_class->GetVTable());
David Sehr709b0702016-10-13 09:12:37 -07006577 CHECK(super_vtable != nullptr) << super_class->PrettyClass();
Alex Lighteb7c1442015-08-31 13:17:42 -07006578 // We might need to change vtable if we have new virtual methods or new interfaces (since that
6579 // might give us new default methods). See comment above.
6580 if (num_virtual_methods == 0 && super_class->GetIfTableCount() == klass->GetIfTableCount()) {
Vladimir Marko3068d582019-05-28 16:39:29 +01006581 klass->SetVTable(super_vtable.Get());
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006582 return true;
6583 }
Vladimir Marko3068d582019-05-28 16:39:29 +01006584 vtable = hs.NewHandle(ObjPtr<mirror::PointerArray>::DownCast(
6585 mirror::Array::CopyOf(super_vtable, self, max_count)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08006586 if (UNLIKELY(vtable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006587 self->AssertPendingOOMException();
Mingyao Yang2cdbad72014-07-16 10:44:41 -07006588 return false;
6589 }
Ian Rogersa436fde2013-08-27 23:34:06 -07006590 }
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006591 // How the algorithm works:
6592 // 1. Populate hash table by adding num_virtual_methods from klass. The values in the hash
6593 // table are: invalid_index for unused slots, index super_vtable_length + i for a virtual
6594 // method which has not been matched to a vtable method, and j if the virtual method at the
6595 // index overrode the super virtual method at index j.
6596    // 2. Loop through super virtual methods; if they are overridden, update the hash table to j
6597 // (j < super_vtable_length) to avoid redundant checks. (TODO maybe use this info for reducing
6598 // the need for the initial vtable which we later shrink back down).
6599 // 3. Add non overridden methods to the end of the vtable.
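    // A worked example (hypothetical methods, not from the sources): if the super vtable is
    // [toString, foo] and klass declares foo and bar, step 1 hashes foo and bar, step 2 matches
    // the super entry foo and writes klass's foo into that same slot, and step 3 appends bar at
    // index super_vtable_length, giving [toString, foo(klass), bar(klass)].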
6600 static constexpr size_t kMaxStackHash = 250;
Alex Lighteb7c1442015-08-31 13:17:42 -07006601 // + 1 so that even if we only have new default methods we will still be able to use this hash
6602 // table (i.e. it will never have 0 size).
6603 const size_t hash_table_size = num_virtual_methods * 3 + 1;
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006604 uint32_t* hash_table_ptr;
6605 std::unique_ptr<uint32_t[]> hash_heap_storage;
6606 if (hash_table_size <= kMaxStackHash) {
6607 hash_table_ptr = reinterpret_cast<uint32_t*>(
6608 alloca(hash_table_size * sizeof(*hash_table_ptr)));
6609 } else {
6610 hash_heap_storage.reset(new uint32_t[hash_table_size]);
6611 hash_table_ptr = hash_heap_storage.get();
6612 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07006613 LinkVirtualHashTable hash_table(klass, hash_table_size, hash_table_ptr, image_pointer_size_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006614 // Add virtual methods to the hash table.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006615 for (size_t i = 0; i < num_virtual_methods; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006616 DCHECK(klass->GetVirtualMethodDuringLinking(
6617 i, image_pointer_size_)->GetDeclaringClass() != nullptr);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006618 hash_table.Add(i);
6619 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006620 // Loop through each super vtable method and see if they are overridden by a method we added to
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006621 // the hash table.
6622 for (size_t j = 0; j < super_vtable_length; ++j) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006623 // Search the hash table to see if we are overridden by any method.
Mathieu Chartiere401d142015-04-22 13:56:20 -07006624 ArtMethod* super_method = vtable->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
Alex Lightc7a420c2016-10-18 14:33:18 -07006625 if (!klass->CanAccessMember(super_method->GetDeclaringClass(),
6626 super_method->GetAccessFlags())) {
6627        // Continue on to the next method since this one is package private and cannot be overridden.
6628 // Before Android 4.1, the package-private method super_method might have been incorrectly
6629 // overridden.
6630 continue;
6631 }
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006632 MethodNameAndSignatureComparator super_method_name_comparator(
Mathieu Chartiere401d142015-04-22 13:56:20 -07006633 super_method->GetInterfaceMethodIfProxy(image_pointer_size_));
Alex Lightc7a420c2016-10-18 14:33:18 -07006634 // We remove the method so that subsequent lookups will be faster by making the hash-map
6635 // smaller as we go on.
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006636 uint32_t hash_index = hash_table.FindAndRemove(&super_method_name_comparator);
6637 if (hash_index != hash_table.GetNotFoundIndex()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006638 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(
6639 hash_index, image_pointer_size_);
Alex Lightc7a420c2016-10-18 14:33:18 -07006640 if (super_method->IsFinal()) {
6641 ThrowLinkageError(klass.Get(), "Method %s overrides final method in class %s",
6642 virtual_method->PrettyMethod().c_str(),
6643 super_method->GetDeclaringClassDescriptor());
6644 return false;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006645 }
Alex Lightc7a420c2016-10-18 14:33:18 -07006646 vtable->SetElementPtrSize(j, virtual_method, image_pointer_size_);
6647 virtual_method->SetMethodIndex(j);
Alex Light9139e002015-10-09 15:59:48 -07006648 } else if (super_method->IsOverridableByDefaultMethod()) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006649 // We didn't directly override this method but we might through default methods...
6650 // Check for default method update.
6651 ArtMethod* default_method = nullptr;
Alex Light9139e002015-10-09 15:59:48 -07006652 switch (FindDefaultMethodImplementation(self,
6653 super_method,
6654 klass,
6655 /*out*/&default_method)) {
6656 case DefaultMethodSearchResult::kDefaultConflict: {
6657 // A conflict was found looking for default methods. Note this (assuming it wasn't
6658 // pre-existing) in the translations map.
6659 if (UNLIKELY(!super_method->IsDefaultConflicting())) {
6660 // Don't generate another conflict method to reduce memory use as an optimization.
6661 default_translations->insert(
6662 {j, ClassLinker::MethodTranslation::CreateConflictingMethod()});
6663 }
6664 break;
6665 }
6666 case DefaultMethodSearchResult::kAbstractFound: {
6667 // No conflict but method is abstract.
6668 // We note that this vtable entry must be made abstract.
6669 if (UNLIKELY(!super_method->IsAbstract())) {
6670 default_translations->insert(
6671 {j, ClassLinker::MethodTranslation::CreateAbstractMethod()});
6672 }
6673 break;
6674 }
6675 case DefaultMethodSearchResult::kDefaultFound: {
6676 if (UNLIKELY(super_method->IsDefaultConflicting() ||
6677 default_method->GetDeclaringClass() != super_method->GetDeclaringClass())) {
6678 // Found a default method implementation that is new.
6679            // TODO: Refactor this to add default methods to virtuals here and not in
6680            // LinkInterfaceMethods, maybe.
6681 // The problem is default methods might override previously present
6682 // default-method or miranda-method vtable entries from the superclass.
6683 // Unfortunately we need these to be entries in this class's virtuals. We do not
6684 // give these entries there until LinkInterfaceMethods so we pass this map around
6685 // to let it know which vtable entries need to be updated.
6686 // Make a note that vtable entry j must be updated, store what it needs to be updated
6687 // to. We will allocate a virtual method slot in LinkInterfaceMethods and fix it up
6688 // then.
6689 default_translations->insert(
6690 {j, ClassLinker::MethodTranslation::CreateTranslatedMethod(default_method)});
David Sehr709b0702016-10-13 09:12:37 -07006691 VLOG(class_linker) << "Method " << super_method->PrettyMethod()
6692 << " overridden by default "
6693 << default_method->PrettyMethod()
6694 << " in " << mirror::Class::PrettyClass(klass.Get());
Alex Light9139e002015-10-09 15:59:48 -07006695 }
6696 break;
6697 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006698 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006699 }
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006700 }
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006701 size_t actual_count = super_vtable_length;
Alex Lighteb7c1442015-08-31 13:17:42 -07006702 // Add the non-overridden methods at the end.
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006703 for (size_t i = 0; i < num_virtual_methods; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006704 ArtMethod* local_method = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006705 size_t method_idx = local_method->GetMethodIndexDuringLinking();
6706 if (method_idx < super_vtable_length &&
Mathieu Chartiere401d142015-04-22 13:56:20 -07006707 local_method == vtable->GetElementPtrSize<ArtMethod*>(method_idx, image_pointer_size_)) {
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006708 continue;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006709 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07006710 vtable->SetElementPtrSize(actual_count, local_method, image_pointer_size_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006711 local_method->SetMethodIndex(actual_count);
6712 ++actual_count;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006713 }
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08006714 if (!IsUint<16>(actual_count)) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006715 ThrowClassFormatError(klass.Get(), "Too many methods defined on class: %zd", actual_count);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006716 return false;
6717 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006718 // Shrink vtable if possible
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006719 CHECK_LE(actual_count, max_count);
6720 if (actual_count < max_count) {
Vladimir Marko3068d582019-05-28 16:39:29 +01006721 vtable.Assign(ObjPtr<mirror::PointerArray>::DownCast(
6722 mirror::Array::CopyOf(vtable, self, actual_count)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08006723 if (UNLIKELY(vtable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006724 self->AssertPendingOOMException();
Ian Rogersa436fde2013-08-27 23:34:06 -07006725 return false;
6726 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006727 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006728 klass->SetVTable(vtable.Get());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006729 } else {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01006730 CHECK_EQ(klass.Get(), GetClassRoot<mirror::Object>(this));
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08006731 if (!IsUint<16>(num_virtual_methods)) {
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006732 ThrowClassFormatError(klass.Get(), "Too many methods: %d",
6733 static_cast<int>(num_virtual_methods));
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006734 return false;
6735 }
Vladimir Markobcf17522018-06-01 13:14:32 +01006736 ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, num_virtual_methods);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006737 if (UNLIKELY(vtable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006738 self->AssertPendingOOMException();
Ian Rogersa436fde2013-08-27 23:34:06 -07006739 return false;
6740 }
Brian Carlstroma40f9bc2011-07-26 21:26:07 -07006741 for (size_t i = 0; i < num_virtual_methods; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006742 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
6743 vtable->SetElementPtrSize(i, virtual_method, image_pointer_size_);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006744 virtual_method->SetMethodIndex(i & 0xFFFF);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006745 }
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006746 klass->SetVTable(vtable);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006747 }
6748 return true;
6749}
6750
Alex Light9139e002015-10-09 15:59:48 -07006751// Determine if the given iface has any subinterface in the given list that declares the method
6752// specified by 'target'.
6753//
6754// Arguments
6755// - self: The thread we are running on
6756// - target: A comparator that will match any method that overrides the method we are checking for
6757// - iftable: The iftable we are searching for an overriding method on.
6758// - ifstart: The index of the interface we are checking to see if anything overrides
6759// - iface: The interface we are checking to see if anything overrides.
6760// - image_pointer_size:
6761// The image pointer size.
6762//
6763// Returns
6764// - True: There is some method that matches the target comparator defined in an interface that
6765// is a subtype of iface.
6766// - False: There is no method that matches the target comparator in any interface that is a subtype
6767// of iface.
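// For example (hypothetical interfaces): if iface is I and a later iftable entry J both extends I
// and re-declares the method matched by 'target', this returns true, so the caller treats I's
// declaration as overridden rather than selecting it.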
6768static bool ContainsOverridingMethodOf(Thread* self,
6769 MethodNameAndSignatureComparator& target,
6770 Handle<mirror::IfTable> iftable,
6771 size_t ifstart,
6772 Handle<mirror::Class> iface,
Andreas Gampe542451c2016-07-26 09:02:02 -07006773 PointerSize image_pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006774 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light9139e002015-10-09 15:59:48 -07006775 DCHECK(self != nullptr);
Andreas Gampefa4333d2017-02-14 11:10:34 -08006776 DCHECK(iface != nullptr);
6777 DCHECK(iftable != nullptr);
Alex Light9139e002015-10-09 15:59:48 -07006778 DCHECK_GE(ifstart, 0u);
6779 DCHECK_LT(ifstart, iftable->Count());
6780 DCHECK_EQ(iface.Get(), iftable->GetInterface(ifstart));
6781 DCHECK(iface->IsInterface());
6782
6783 size_t iftable_count = iftable->Count();
6784 StackHandleScope<1> hs(self);
6785 MutableHandle<mirror::Class> current_iface(hs.NewHandle<mirror::Class>(nullptr));
6786 for (size_t k = ifstart + 1; k < iftable_count; k++) {
6787 // Skip ifstart since our current interface obviously cannot override itself.
6788 current_iface.Assign(iftable->GetInterface(k));
Alex Lighte64300b2015-12-15 15:02:47 -08006789 // Iterate through every method on this interface. The order does not matter.
6790 for (ArtMethod& current_method : current_iface->GetDeclaredVirtualMethods(image_pointer_size)) {
Alex Light9139e002015-10-09 15:59:48 -07006791 if (UNLIKELY(target.HasSameNameAndSignature(
Alex Lighte64300b2015-12-15 15:02:47 -08006792 current_method.GetInterfaceMethodIfProxy(image_pointer_size)))) {
Alex Light9139e002015-10-09 15:59:48 -07006793        // Check if the k'th interface is a subtype of the interface we are checking (iface).
6794 if (iface->IsAssignableFrom(current_iface.Get())) {
6795 return true;
6796 }
6797 break;
6798 }
6799 }
6800 }
6801 return false;
6802}
6803
Alex Lighteb7c1442015-08-31 13:17:42 -07006804// Find the default method implementation for 'target_method' in 'klass'. Stores it into
Alex Light9139e002015-10-09 15:59:48 -07006805// out_default_method and returns kDefaultFound on success. If no default method was found return
6806// kAbstractFound and store nullptr into out_default_method. If an error occurs (such as a
6807// default_method conflict) it will return kDefaultConflict.
6808ClassLinker::DefaultMethodSearchResult ClassLinker::FindDefaultMethodImplementation(
6809 Thread* self,
6810 ArtMethod* target_method,
6811 Handle<mirror::Class> klass,
6812 /*out*/ArtMethod** out_default_method) const {
Alex Lighteb7c1442015-08-31 13:17:42 -07006813 DCHECK(self != nullptr);
6814 DCHECK(target_method != nullptr);
6815 DCHECK(out_default_method != nullptr);
Alex Lighteb7c1442015-08-31 13:17:42 -07006816
6817 *out_default_method = nullptr;
Alex Lighteb7c1442015-08-31 13:17:42 -07006818
6819  // We organize the interface table so that, for interface I, any subinterface J follows it in the
6820 // table. This lets us walk the table backwards when searching for default methods. The first one
6821 // we encounter is the best candidate since it is the most specific. Once we have found it we keep
6822 // track of it and then continue checking all other interfaces, since we need to throw an error if
6823 // we encounter conflicting default method implementations (one is not a subtype of the other).
6824 //
6825 // The order of unrelated interfaces does not matter and is not defined.
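  // For example (hypothetical interfaces): if klass implements J, J extends I, and both I and J
  // declare a default for the target method, the iftable places I before J, so the reverse walk
  // below finds J's default first; I's version is then tolerated because J is a subtype of I, and
  // no conflict is reported.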
6826 size_t iftable_count = klass->GetIfTableCount();
6827 if (iftable_count == 0) {
Alex Light9139e002015-10-09 15:59:48 -07006828 // No interfaces. We have already reset out to null so just return kAbstractFound.
6829 return DefaultMethodSearchResult::kAbstractFound;
Alex Lighteb7c1442015-08-31 13:17:42 -07006830 }
6831
Alex Light9139e002015-10-09 15:59:48 -07006832 StackHandleScope<3> hs(self);
6833 MutableHandle<mirror::Class> chosen_iface(hs.NewHandle<mirror::Class>(nullptr));
Alex Lighteb7c1442015-08-31 13:17:42 -07006834 MutableHandle<mirror::IfTable> iftable(hs.NewHandle(klass->GetIfTable()));
Alex Light9139e002015-10-09 15:59:48 -07006835 MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
Alex Lighteb7c1442015-08-31 13:17:42 -07006836 MethodNameAndSignatureComparator target_name_comparator(
6837 target_method->GetInterfaceMethodIfProxy(image_pointer_size_));
6838 // Iterates over the klass's iftable in reverse
Alex Light9139e002015-10-09 15:59:48 -07006839 for (size_t k = iftable_count; k != 0; ) {
6840 --k;
6841
Alex Lighteb7c1442015-08-31 13:17:42 -07006842 DCHECK_LT(k, iftable->Count());
Alex Light9139e002015-10-09 15:59:48 -07006843
6844 iface.Assign(iftable->GetInterface(k));
Alex Lighte64300b2015-12-15 15:02:47 -08006845 // Iterate through every declared method on this interface. The order does not matter.
6846 for (auto& method_iter : iface->GetDeclaredVirtualMethods(image_pointer_size_)) {
6847 ArtMethod* current_method = &method_iter;
Alex Lighteb7c1442015-08-31 13:17:42 -07006848 // Skip abstract methods and methods with different names.
6849 if (current_method->IsAbstract() ||
6850 !target_name_comparator.HasSameNameAndSignature(
6851 current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
6852 continue;
Alex Lightd7c10c22016-03-31 10:03:07 -07006853 } else if (!current_method->IsPublic()) {
6854 // The verifier should have caught the non-public method for dex version 37. Just warn and
6855 // skip it since this is from before default-methods so we don't really need to care that it
6856 // has code.
David Sehr709b0702016-10-13 09:12:37 -07006857 LOG(WARNING) << "Interface method " << current_method->PrettyMethod()
6858 << " is not public! "
Alex Lightd7c10c22016-03-31 10:03:07 -07006859 << "This will be a fatal error in subsequent versions of android. "
6860 << "Continuing anyway.";
Alex Lighteb7c1442015-08-31 13:17:42 -07006861 }
Andreas Gampefa4333d2017-02-14 11:10:34 -08006862 if (UNLIKELY(chosen_iface != nullptr)) {
Alex Light9139e002015-10-09 15:59:48 -07006863 // We have multiple default impls of the same method. This is a potential default conflict.
6864        // We need to check if this possibly conflicting method is either declared in a super-interface of
6865        // the chosen default implementation's interface or is overridden by a non-default interface method. In either case
6866 // there is no conflict.
6867 if (!iface->IsAssignableFrom(chosen_iface.Get()) &&
6868 !ContainsOverridingMethodOf(self,
6869 target_name_comparator,
6870 iftable,
6871 k,
6872 iface,
6873 image_pointer_size_)) {
Nicolas Geoffray7f3e0db2016-01-28 09:29:31 +00006874 VLOG(class_linker) << "Conflicting default method implementations found: "
David Sehr709b0702016-10-13 09:12:37 -07006875 << current_method->PrettyMethod() << " and "
6876 << ArtMethod::PrettyMethod(*out_default_method) << " in class "
6877 << klass->PrettyClass() << " conflict.";
Alex Light9139e002015-10-09 15:59:48 -07006878 *out_default_method = nullptr;
6879 return DefaultMethodSearchResult::kDefaultConflict;
Alex Lighteb7c1442015-08-31 13:17:42 -07006880 } else {
6881 break; // Continue checking at the next interface.
6882 }
6883 } else {
Alex Light9139e002015-10-09 15:59:48 -07006884 // chosen_iface == null
6885 if (!ContainsOverridingMethodOf(self,
6886 target_name_comparator,
6887 iftable,
6888 k,
6889 iface,
6890 image_pointer_size_)) {
6891 // Don't set this as the chosen interface if something else is overriding it (because that
6892 // other interface would be potentially chosen instead if it was default). If the other
6893 // interface was abstract then we wouldn't select this interface as chosen anyway since
6894 // the abstract method masks it.
6895 *out_default_method = current_method;
6896 chosen_iface.Assign(iface.Get());
6897 // We should now finish traversing the graph to find if we have default methods that
6898 // conflict.
6899 } else {
David Sehr709b0702016-10-13 09:12:37 -07006900 VLOG(class_linker) << "A default method '" << current_method->PrettyMethod()
6901 << "' was "
6902 << "skipped because it was overridden by an abstract method in a "
6903 << "subinterface on class '" << klass->PrettyClass() << "'";
Alex Light9139e002015-10-09 15:59:48 -07006904 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006905 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006906 break;
6907 }
6908 }
Alex Light9139e002015-10-09 15:59:48 -07006909 if (*out_default_method != nullptr) {
David Sehr709b0702016-10-13 09:12:37 -07006910 VLOG(class_linker) << "Default method '" << (*out_default_method)->PrettyMethod()
6911 << "' selected "
6912 << "as the implementation for '" << target_method->PrettyMethod()
6913 << "' in '" << klass->PrettyClass() << "'";
Alex Light9139e002015-10-09 15:59:48 -07006914 return DefaultMethodSearchResult::kDefaultFound;
6915 } else {
6916 return DefaultMethodSearchResult::kAbstractFound;
6917 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006918}
6919
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006920ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006921 ArtMethod* conflict_method,
6922 ArtMethod* interface_method,
6923 ArtMethod* method,
6924 bool force_new_conflict_method) {
Andreas Gampe542451c2016-07-26 09:02:02 -07006925 ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006926 Runtime* const runtime = Runtime::Current();
6927 LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
6928 bool new_entry = conflict_method == runtime->GetImtConflictMethod() || force_new_conflict_method;
6929
6930 // Create a new entry if the existing one is the shared conflict method.
6931 ArtMethod* new_conflict_method = new_entry
6932 ? runtime->CreateImtConflictMethod(linear_alloc)
6933 : conflict_method;
6934
6935 // Allocate a new table. Note that we will leak this table at the next conflict,
6936 // but that's a tradeoff compared to making the table fixed size.
6937 void* data = linear_alloc->Alloc(
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006938 Thread::Current(), ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table,
6939 image_pointer_size_));
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006940 if (data == nullptr) {
6941 LOG(ERROR) << "Failed to allocate conflict table";
6942 return conflict_method;
6943 }
6944 ImtConflictTable* new_table = new (data) ImtConflictTable(current_table,
6945 interface_method,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006946 method,
6947 image_pointer_size_);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006948
6949 // Do a fence to ensure threads see the data in the table before it is assigned
6950 // to the conflict method.
6951 // Note that there is a race in the presence of multiple threads and we may leak
6952 // memory from the LinearAlloc, but that's a tradeoff compared to using
6953 // atomic operations.
Orion Hodson27b96762018-03-13 16:06:57 +00006954 std::atomic_thread_fence(std::memory_order_release);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006955 new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006956 return new_conflict_method;
6957}
6958
Vladimir Marko921094a2017-01-12 18:37:06 +00006959bool ClassLinker::AllocateIfTableMethodArrays(Thread* self,
6960 Handle<mirror::Class> klass,
6961 Handle<mirror::IfTable> iftable) {
6962 DCHECK(!klass->IsInterface());
6963 const bool has_superclass = klass->HasSuperClass();
6964 const bool extend_super_iftable = has_superclass;
6965 const size_t ifcount = klass->GetIfTableCount();
6966 const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
6967 for (size_t i = 0; i < ifcount; ++i) {
6968 size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
6969 if (num_methods > 0) {
6970 const bool is_super = i < super_ifcount;
6971      // If this is an interface implemented by a super-class, we can just copy the method
6972      // array from the superclass.
6973 const bool super_interface = is_super && extend_super_iftable;
6974 ObjPtr<mirror::PointerArray> method_array;
6975 if (super_interface) {
6976 ObjPtr<mirror::IfTable> if_table = klass->GetSuperClass()->GetIfTable();
6977 DCHECK(if_table != nullptr);
6978 DCHECK(if_table->GetMethodArray(i) != nullptr);
6979 // If we are working on a super interface, try extending the existing method array.
Vladimir Marko3068d582019-05-28 16:39:29 +01006980 StackHandleScope<1u> hs(self);
6981 Handle<mirror::PointerArray> old_array = hs.NewHandle(if_table->GetMethodArray(i));
6982 method_array =
6983 ObjPtr<mirror::PointerArray>::DownCast(mirror::Object::Clone(old_array, self));
Vladimir Marko921094a2017-01-12 18:37:06 +00006984 } else {
6985 method_array = AllocPointerArray(self, num_methods);
6986 }
6987 if (UNLIKELY(method_array == nullptr)) {
6988 self->AssertPendingOOMException();
6989 return false;
6990 }
6991 iftable->SetMethodArray(i, method_array);
6992 }
6993 }
6994 return true;
6995}
6996
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006997void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method,
6998 ArtMethod* imt_conflict_method,
6999 ArtMethod* current_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007000 /*out*/bool* new_conflict,
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007001 /*out*/ArtMethod** imt_ref) {
Alex Lighteb7c1442015-08-31 13:17:42 -07007002 // Place method in imt if entry is empty, place conflict otherwise.
7003 if (*imt_ref == unimplemented_method) {
7004 *imt_ref = current_method;
Nicolas Geoffray796d6302016-03-13 22:22:31 +00007005 } else if (!(*imt_ref)->IsRuntimeMethod()) {
Alex Lighteb7c1442015-08-31 13:17:42 -07007006 // If we are not a conflict and we have the same signature and name as the imt
7007 // entry, it must be that we overwrote a superclass vtable entry.
Nicolas Geoffray796d6302016-03-13 22:22:31 +00007008 // Note that we have checked IsRuntimeMethod, as there may be multiple different
7009 // conflict methods.
Alex Lighteb7c1442015-08-31 13:17:42 -07007010 MethodNameAndSignatureComparator imt_comparator(
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007011 (*imt_ref)->GetInterfaceMethodIfProxy(image_pointer_size_));
Alex Lighteb7c1442015-08-31 13:17:42 -07007012 if (imt_comparator.HasSameNameAndSignature(
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007013 current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
Alex Lighteb7c1442015-08-31 13:17:42 -07007014 *imt_ref = current_method;
7015 } else {
Alex Light9139e002015-10-09 15:59:48 -07007016 *imt_ref = imt_conflict_method;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007017 *new_conflict = true;
Alex Lighteb7c1442015-08-31 13:17:42 -07007018 }
Nicolas Geoffray796d6302016-03-13 22:22:31 +00007019 } else {
7020 // Place the default conflict method. Note that there may be an existing conflict
7021 // method in the IMT, but it could be one tailored to the super class, with a
7022 // specific ImtConflictTable.
7023 *imt_ref = imt_conflict_method;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007024 *new_conflict = true;
Alex Lighteb7c1442015-08-31 13:17:42 -07007025 }
7026}
7027
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007028void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
David Sehr709b0702016-10-13 09:12:37 -07007029 DCHECK(klass->ShouldHaveImt()) << klass->PrettyClass();
7030 DCHECK(!klass->IsTemp()) << klass->PrettyClass();
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007031 ArtMethod* imt_data[ImTable::kSize];
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007032 Runtime* const runtime = Runtime::Current();
7033 ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
7034 ArtMethod* const conflict_method = runtime->GetImtConflictMethod();
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007035 std::fill_n(imt_data, arraysize(imt_data), unimplemented_method);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007036 if (klass->GetIfTable() != nullptr) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007037 bool new_conflict = false;
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007038 FillIMTFromIfTable(klass->GetIfTable(),
7039 unimplemented_method,
7040 conflict_method,
7041 klass,
Andreas Gampe98ea9d92018-10-19 14:06:15 -07007042 /*create_conflict_tables=*/true,
7043 /*ignore_copied_methods=*/false,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007044 &new_conflict,
7045 &imt_data[0]);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007046 }
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007047 if (!klass->ShouldHaveImt()) {
7048 return;
7049 }
7050 // Compare the IMT with the super class including the conflict methods. If they are equivalent,
7051 // we can just use the same pointer.
7052 ImTable* imt = nullptr;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007053 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007054 if (super_class != nullptr && super_class->ShouldHaveImt()) {
7055 ImTable* super_imt = super_class->GetImt(image_pointer_size_);
7056 bool same = true;
7057 for (size_t i = 0; same && i < ImTable::kSize; ++i) {
7058 ArtMethod* method = imt_data[i];
7059 ArtMethod* super_method = super_imt->Get(i, image_pointer_size_);
7060 if (method != super_method) {
7061 bool is_conflict_table = method->IsRuntimeMethod() &&
7062 method != unimplemented_method &&
7063 method != conflict_method;
7064 // Verify conflict contents.
7065 bool super_conflict_table = super_method->IsRuntimeMethod() &&
7066 super_method != unimplemented_method &&
7067 super_method != conflict_method;
7068 if (!is_conflict_table || !super_conflict_table) {
7069 same = false;
7070 } else {
7071 ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_);
7072 ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_);
7073 same = same && table1->Equals(table2, image_pointer_size_);
7074 }
7075 }
7076 }
7077 if (same) {
7078 imt = super_imt;
7079 }
7080 }
7081 if (imt == nullptr) {
7082 imt = klass->GetImt(image_pointer_size_);
7083 DCHECK(imt != nullptr);
7084 imt->Populate(imt_data, image_pointer_size_);
7085 } else {
7086 klass->SetImt(imt, image_pointer_size_);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007087 }
7088}
7089
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007090ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
7091 LinearAlloc* linear_alloc,
Andreas Gampe542451c2016-07-26 09:02:02 -07007092 PointerSize image_pointer_size) {
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007093 void* data = linear_alloc->Alloc(Thread::Current(),
7094 ImtConflictTable::ComputeSize(count,
7095 image_pointer_size));
7096 return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
7097}
7098
7099ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, LinearAlloc* linear_alloc) {
7100 return CreateImtConflictTable(count, linear_alloc, image_pointer_size_);
7101}
7102
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007103void ClassLinker::FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007104 ArtMethod* unimplemented_method,
7105 ArtMethod* imt_conflict_method,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007106 ObjPtr<mirror::Class> klass,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007107 bool create_conflict_tables,
7108 bool ignore_copied_methods,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007109 /*out*/bool* new_conflict,
7110 /*out*/ArtMethod** imt) {
7111 uint32_t conflict_counts[ImTable::kSize] = {};
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007112 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007113 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007114 const size_t num_virtuals = interface->NumVirtualMethods();
7115 const size_t method_array_count = if_table->GetMethodArrayCount(i);
7116 // Virtual methods can be larger than the if table methods if there are default methods.
7117 DCHECK_GE(num_virtuals, method_array_count);
7118 if (kIsDebugBuild) {
7119 if (klass->IsInterface()) {
7120 DCHECK_EQ(method_array_count, 0u);
7121 } else {
7122 DCHECK_EQ(interface->NumDeclaredVirtualMethods(), method_array_count);
7123 }
7124 }
7125 if (method_array_count == 0) {
7126 continue;
7127 }
Vladimir Marko557fece2019-03-26 14:29:41 +00007128 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007129 for (size_t j = 0; j < method_array_count; ++j) {
7130 ArtMethod* implementation_method =
7131 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
7132 if (ignore_copied_methods && implementation_method->IsCopied()) {
7133 continue;
7134 }
7135 DCHECK(implementation_method != nullptr);
7136 // Miranda methods cannot be used to implement an interface method, but they are safe to put
7137 // in the IMT since their entrypoint is the interface trampoline. If we put any copied methods
7138 // or interface methods in the IMT here they will not create extra conflicts since we compare
7139 // names and signatures in SetIMTRef.
7140 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
David Srbeckye36e7f22018-11-14 14:21:23 +00007141 const uint32_t imt_index = interface_method->GetImtIndex();
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007142
7143      // There are only conflicts if the interface methods for an IMT slot don't all have the same
7144      // implementation method; keep track of this to avoid creating a conflict table in
7145      // this case.
7146
7147 // Conflict table size for each IMT slot.
7148 ++conflict_counts[imt_index];
7149
7150 SetIMTRef(unimplemented_method,
7151 imt_conflict_method,
7152 implementation_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007153 /*out*/new_conflict,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007154 /*out*/&imt[imt_index]);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007155 }
7156 }
7157
7158 if (create_conflict_tables) {
7159 // Create the conflict tables.
7160 LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007161 for (size_t i = 0; i < ImTable::kSize; ++i) {
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007162 size_t conflicts = conflict_counts[i];
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007163 if (imt[i] == imt_conflict_method) {
7164 ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc);
7165 if (new_table != nullptr) {
7166 ArtMethod* new_conflict_method =
7167 Runtime::Current()->CreateImtConflictMethod(linear_alloc);
7168 new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
7169 imt[i] = new_conflict_method;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007170 } else {
7171 LOG(ERROR) << "Failed to allocate conflict table";
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007172 imt[i] = imt_conflict_method;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007173 }
7174 } else {
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007175 DCHECK_NE(imt[i], imt_conflict_method);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007176 }
7177 }
7178
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007179 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007180 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007181 const size_t method_array_count = if_table->GetMethodArrayCount(i);
7182      // There can be more virtual methods than if-table methods if there are default methods.
7183 if (method_array_count == 0) {
7184 continue;
7185 }
Vladimir Marko557fece2019-03-26 14:29:41 +00007186 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007187 for (size_t j = 0; j < method_array_count; ++j) {
7188 ArtMethod* implementation_method =
7189 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
7190 if (ignore_copied_methods && implementation_method->IsCopied()) {
7191 continue;
7192 }
7193 DCHECK(implementation_method != nullptr);
7194 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
David Srbeckye36e7f22018-11-14 14:21:23 +00007195 const uint32_t imt_index = interface_method->GetImtIndex();
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007196 if (!imt[imt_index]->IsRuntimeMethod() ||
7197 imt[imt_index] == unimplemented_method ||
7198 imt[imt_index] == imt_conflict_method) {
7199 continue;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007200 }
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007201 ImtConflictTable* table = imt[imt_index]->GetImtConflictTable(image_pointer_size_);
7202 const size_t num_entries = table->NumEntries(image_pointer_size_);
7203 table->SetInterfaceMethod(num_entries, image_pointer_size_, interface_method);
7204 table->SetImplementationMethod(num_entries, image_pointer_size_, implementation_method);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007205 }
7206 }
7207 }
7208}
7209
Alex Lighteb7c1442015-08-31 13:17:42 -07007210// Simple helper function that checks that no subtypes of 'val' are contained within the 'classes'
7211// set.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007212static bool NotSubinterfaceOfAny(
7213 const std::unordered_set<ObjPtr<mirror::Class>, HashObjPtr>& classes,
7214 ObjPtr<mirror::Class> val)
Alex Lighteb7c1442015-08-31 13:17:42 -07007215 REQUIRES(Roles::uninterruptible_)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07007216 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Lighteb7c1442015-08-31 13:17:42 -07007217 DCHECK(val != nullptr);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007218 for (ObjPtr<mirror::Class> c : classes) {
7219 if (val->IsAssignableFrom(c)) {
Alex Lighteb7c1442015-08-31 13:17:42 -07007220 return false;
7221 }
7222 }
7223 return true;
7224}
7225
7226// Fills in and flattens the interface inheritance hierarchy.
7227//
7228// By the end of this function all interfaces in the transitive closure of to_process are added to
7229// the iftable and every interface precedes all of its sub-interfaces in this list.
7230//
7231// all I, J: Interface | I <: J implies J precedes I
7232//
7233// (note A <: B means that A is a subtype of B)
7234//
7235// This returns the total number of items in the iftable. The iftable might be resized down after
7236// this call.
7237//
7238// We order this backwards so that we do not need to reorder superclass interfaces when new
7239// interfaces are added in subclass's interface tables.
7240//
7241// Upon entry into this function iftable is a copy of the superclass's iftable with the first
7242// super_ifcount entries filled in with the transitive closure of the interfaces of the superclass.
7243// The other entries are uninitialized. We will fill in the remaining entries in this function. The
7244// iftable must be large enough to hold all interfaces without changing its size.
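//
// For example: if interface B extends A and a class declares "implements B", this pass adds A
// before B, so the filled section is ordered [..., A, B]; the super-interface A precedes its
// sub-interface B, which is exactly the invariant checked by NotSubinterfaceOfAny() above.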
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007245static size_t FillIfTable(ObjPtr<mirror::IfTable> iftable,
Alex Lighteb7c1442015-08-31 13:17:42 -07007246 size_t super_ifcount,
Stephen Hines48ba1972018-09-24 13:35:54 -07007247 const std::vector<ObjPtr<mirror::Class>>& to_process)
Alex Lighteb7c1442015-08-31 13:17:42 -07007248 REQUIRES(Roles::uninterruptible_)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07007249 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Lighteb7c1442015-08-31 13:17:42 -07007250   // This is the set of all classes already in the iftable. Used to make checking if a class has
7251 // already been added quicker.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007252 std::unordered_set<ObjPtr<mirror::Class>, HashObjPtr> classes_in_iftable;
Alex Lighteb7c1442015-08-31 13:17:42 -07007253 // The first super_ifcount elements are from the superclass. We note that they are already added.
7254 for (size_t i = 0; i < super_ifcount; i++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007255 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
Alex Lighteb7c1442015-08-31 13:17:42 -07007256 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, iface)) << "Bad ordering.";
7257 classes_in_iftable.insert(iface);
7258 }
7259 size_t filled_ifcount = super_ifcount;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007260 for (ObjPtr<mirror::Class> interface : to_process) {
Alex Lighteb7c1442015-08-31 13:17:42 -07007261 // Let us call the first filled_ifcount elements of iftable the current-iface-list.
7262 // At this point in the loop current-iface-list has the invariant that:
7263 // for every pair of interfaces I,J within it:
7264 // if index_of(I) < index_of(J) then I is not a subtype of J
7265
7266 // If we have already seen this element then all of its super-interfaces must already be in the
7267 // current-iface-list so we can skip adding it.
7268 if (!ContainsElement(classes_in_iftable, interface)) {
7269 // We haven't seen this interface so add all of its super-interfaces onto the
7270 // current-iface-list, skipping those already on it.
7271 int32_t ifcount = interface->GetIfTableCount();
7272 for (int32_t j = 0; j < ifcount; j++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007273 ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
Alex Lighteb7c1442015-08-31 13:17:42 -07007274 if (!ContainsElement(classes_in_iftable, super_interface)) {
7275 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, super_interface)) << "Bad ordering.";
7276 classes_in_iftable.insert(super_interface);
7277 iftable->SetInterface(filled_ifcount, super_interface);
7278 filled_ifcount++;
7279 }
7280 }
7281 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, interface)) << "Bad ordering";
7282 // Place this interface onto the current-iface-list after all of its super-interfaces.
7283 classes_in_iftable.insert(interface);
7284 iftable->SetInterface(filled_ifcount, interface);
7285 filled_ifcount++;
7286 } else if (kIsDebugBuild) {
7287 // Check all super-interfaces are already in the list.
7288 int32_t ifcount = interface->GetIfTableCount();
7289 for (int32_t j = 0; j < ifcount; j++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007290 ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
Alex Lighteb7c1442015-08-31 13:17:42 -07007291 DCHECK(ContainsElement(classes_in_iftable, super_interface))
David Sehr709b0702016-10-13 09:12:37 -07007292 << "Iftable does not contain " << mirror::Class::PrettyClass(super_interface)
7293 << ", a superinterface of " << interface->PrettyClass();
Alex Lighteb7c1442015-08-31 13:17:42 -07007294 }
7295 }
7296 }
7297 if (kIsDebugBuild) {
7298 // Check that the iftable is ordered correctly.
7299 for (size_t i = 0; i < filled_ifcount; i++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007300 ObjPtr<mirror::Class> if_a = iftable->GetInterface(i);
Alex Lighteb7c1442015-08-31 13:17:42 -07007301 for (size_t j = i + 1; j < filled_ifcount; j++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007302 ObjPtr<mirror::Class> if_b = iftable->GetInterface(j);
Alex Lighteb7c1442015-08-31 13:17:42 -07007303 // !(if_a <: if_b)
7304 CHECK(!if_b->IsAssignableFrom(if_a))
David Sehr709b0702016-10-13 09:12:37 -07007305 << "Bad interface order: " << mirror::Class::PrettyClass(if_a) << " (index " << i
7306 << ") extends "
7307 << if_b->PrettyClass() << " (index " << j << ") and so should be after it in the "
Alex Lighteb7c1442015-08-31 13:17:42 -07007308 << "interface list.";
7309 }
7310 }
7311 }
7312 return filled_ifcount;
7313}
7314
7315bool ClassLinker::SetupInterfaceLookupTable(Thread* self, Handle<mirror::Class> klass,
7316 Handle<mirror::ObjectArray<mirror::Class>> interfaces) {
7317 StackHandleScope<1> hs(self);
Mathieu Chartier6beced42016-11-15 15:51:31 -08007318 const bool has_superclass = klass->HasSuperClass();
7319 const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
Andreas Gampefa4333d2017-02-14 11:10:34 -08007320 const bool have_interfaces = interfaces != nullptr;
Alex Lighteb7c1442015-08-31 13:17:42 -07007321 const size_t num_interfaces =
7322 have_interfaces ? interfaces->GetLength() : klass->NumDirectInterfaces();
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007323 if (num_interfaces == 0) {
7324 if (super_ifcount == 0) {
Mathieu Chartier6beced42016-11-15 15:51:31 -08007325 if (LIKELY(has_superclass)) {
7326 klass->SetIfTable(klass->GetSuperClass()->GetIfTable());
7327 }
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007328 // Class implements no interfaces.
7329 DCHECK_EQ(klass->GetIfTableCount(), 0);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007330 return true;
7331 }
Ian Rogers9bc81912012-10-11 21:43:36 -07007332     // Class implements the same interfaces as its parent; are any of these non-marker interfaces?
7333 bool has_non_marker_interface = false;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007334 ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007335 for (size_t i = 0; i < super_ifcount; ++i) {
Ian Rogers9bc81912012-10-11 21:43:36 -07007336 if (super_iftable->GetMethodArrayCount(i) > 0) {
7337 has_non_marker_interface = true;
7338 break;
7339 }
7340 }
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007341 // Class just inherits marker interfaces from parent so recycle parent's iftable.
Ian Rogers9bc81912012-10-11 21:43:36 -07007342 if (!has_non_marker_interface) {
Ian Rogers9bc81912012-10-11 21:43:36 -07007343 klass->SetIfTable(super_iftable);
7344 return true;
7345 }
7346 }
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007347 size_t ifcount = super_ifcount + num_interfaces;
Alex Lighteb7c1442015-08-31 13:17:42 -07007348 // Check that every class being implemented is an interface.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007349 for (size_t i = 0; i < num_interfaces; i++) {
Mathieu Chartier28bd2e42016-10-04 13:54:57 -07007350 ObjPtr<mirror::Class> interface = have_interfaces
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07007351 ? interfaces->GetWithoutChecks(i)
Vladimir Marko19a4d372016-12-08 14:41:46 +00007352 : mirror::Class::GetDirectInterface(self, klass.Get(), i);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007353 DCHECK(interface != nullptr);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007354 if (UNLIKELY(!interface->IsInterface())) {
7355 std::string temp;
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07007356 ThrowIncompatibleClassChangeError(klass.Get(),
7357 "Class %s implements non-interface class %s",
David Sehr709b0702016-10-13 09:12:37 -07007358 klass->PrettyDescriptor().c_str(),
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007359 PrettyDescriptor(interface->GetDescriptor(&temp)).c_str());
7360 return false;
7361 }
7362 ifcount += interface->GetIfTableCount();
7363 }
Alex Lighteb7c1442015-08-31 13:17:42 -07007364 // Create the interface function table.
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07007365 MutableHandle<mirror::IfTable> iftable(hs.NewHandle(AllocIfTable(self, ifcount)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08007366 if (UNLIKELY(iftable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07007367 self->AssertPendingOOMException();
Ian Rogersa436fde2013-08-27 23:34:06 -07007368 return false;
7369 }
Alex Lighteb7c1442015-08-31 13:17:42 -07007370 // Fill in table with superclass's iftable.
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07007371 if (super_ifcount != 0) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007372 ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
Brian Carlstrom4b620ff2011-09-11 01:11:01 -07007373 for (size_t i = 0; i < super_ifcount; i++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007374 ObjPtr<mirror::Class> super_interface = super_iftable->GetInterface(i);
Ian Rogers9bc81912012-10-11 21:43:36 -07007375 iftable->SetInterface(i, super_interface);
Brian Carlstrom4b620ff2011-09-11 01:11:01 -07007376 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07007377 }
Alex Lighteb7c1442015-08-31 13:17:42 -07007378
7379 // Note that AllowThreadSuspension is to thread suspension as pthread_testcancel is to pthread
7380   // cancellation. That is, it will suspend if there is a pending suspend request but otherwise
7381 // doesn't really do anything.
Ian Rogers7b078e82014-09-10 14:44:24 -07007382 self->AllowThreadSuspension();
Alex Lighteb7c1442015-08-31 13:17:42 -07007383
7384 size_t new_ifcount;
7385 {
Mathieu Chartier268764d2016-09-13 12:09:38 -07007386 ScopedAssertNoThreadSuspension nts("Copying mirror::Class*'s for FillIfTable");
Vladimir Markobcf17522018-06-01 13:14:32 +01007387 std::vector<ObjPtr<mirror::Class>> to_add;
Alex Lighteb7c1442015-08-31 13:17:42 -07007388 for (size_t i = 0; i < num_interfaces; i++) {
Mathieu Chartier28bd2e42016-10-04 13:54:57 -07007389 ObjPtr<mirror::Class> interface = have_interfaces ? interfaces->Get(i) :
Vladimir Marko19a4d372016-12-08 14:41:46 +00007390 mirror::Class::GetDirectInterface(self, klass.Get(), i);
Vladimir Markobcf17522018-06-01 13:14:32 +01007391 to_add.push_back(interface);
Ian Rogersb52b01a2012-01-12 17:01:38 -08007392 }
Alex Lighteb7c1442015-08-31 13:17:42 -07007393
7394 new_ifcount = FillIfTable(iftable.Get(), super_ifcount, std::move(to_add));
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07007395 }
Alex Lighteb7c1442015-08-31 13:17:42 -07007396
Ian Rogers7b078e82014-09-10 14:44:24 -07007397 self->AllowThreadSuspension();
Alex Lighteb7c1442015-08-31 13:17:42 -07007398
Ian Rogersb52b01a2012-01-12 17:01:38 -08007399 // Shrink iftable in case duplicates were found
Alex Lighteb7c1442015-08-31 13:17:42 -07007400 if (new_ifcount < ifcount) {
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007401 DCHECK_NE(num_interfaces, 0U);
Vladimir Markobcf17522018-06-01 13:14:32 +01007402 iftable.Assign(ObjPtr<mirror::IfTable>::DownCast(
Vladimir Marko3068d582019-05-28 16:39:29 +01007403 mirror::IfTable::CopyOf(iftable, self, new_ifcount * mirror::IfTable::kMax)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08007404 if (UNLIKELY(iftable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07007405 self->AssertPendingOOMException();
Ian Rogersa436fde2013-08-27 23:34:06 -07007406 return false;
7407 }
Alex Lighteb7c1442015-08-31 13:17:42 -07007408 ifcount = new_ifcount;
Ian Rogersb52b01a2012-01-12 17:01:38 -08007409 } else {
Alex Lighteb7c1442015-08-31 13:17:42 -07007410 DCHECK_EQ(new_ifcount, ifcount);
Ian Rogersb52b01a2012-01-12 17:01:38 -08007411 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07007412 klass->SetIfTable(iftable.Get());
Alex Lighteb7c1442015-08-31 13:17:42 -07007413 return true;
7414}
7415
Alex Light1f3925d2016-09-07 12:04:20 -07007416// Finds the method with a name/signature that matches cmp in the given lists of methods. The lists
7417// must not contain duplicates. This overload is the recursion base case: no lists remain, so no match.
7418static ArtMethod* FindSameNameAndSignature(MethodNameAndSignatureComparator& cmp ATTRIBUTE_UNUSED) {
7419 return nullptr;
7420}
7421
7422template <typename ... Types>
Alex Light9139e002015-10-09 15:59:48 -07007423static ArtMethod* FindSameNameAndSignature(MethodNameAndSignatureComparator& cmp,
Alex Light1f3925d2016-09-07 12:04:20 -07007424 const ScopedArenaVector<ArtMethod*>& list,
7425 const Types& ... rest)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07007426 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light9139e002015-10-09 15:59:48 -07007427 for (ArtMethod* method : list) {
7428 if (cmp.HasSameNameAndSignature(method)) {
7429 return method;
7430 }
7431 }
Alex Light1f3925d2016-09-07 12:04:20 -07007432 return FindSameNameAndSignature(cmp, rest...);
Alex Light9139e002015-10-09 15:59:48 -07007433}
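// Example usage (as later in this file): FindSameNameAndSignature(cmp, default_conflict_methods_,
// overriding_default_conflict_methods_) searches both lists in order and returns the first match,
// or nullptr if neither contains one.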
7434
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007435namespace {
7436
Alex Light1f3925d2016-09-07 12:04:20 -07007437// Check that all vtable entries are present in this class's virtuals or are the same as a
7438// superclass's vtable entry.
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007439void CheckClassOwnsVTableEntries(Thread* self,
7440 Handle<mirror::Class> klass,
7441 PointerSize pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07007442 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light1f3925d2016-09-07 12:04:20 -07007443 StackHandleScope<2> hs(self);
7444 Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007445 ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
Alex Light1f3925d2016-09-07 12:04:20 -07007446 Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
Andreas Gampefa4333d2017-02-14 11:10:34 -08007447 int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
Alex Lighte64300b2015-12-15 15:02:47 -08007448 for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
7449 ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
7450 CHECK(m != nullptr);
7451
Alex Lighta41a30782017-03-29 11:33:19 -07007452 if (m->GetMethodIndexDuringLinking() != i) {
7453 LOG(WARNING) << m->PrettyMethod()
7454                    << " has an unexpected method index for its spot in the vtable for class "
7455 << klass->PrettyClass();
7456 }
Alex Lighte64300b2015-12-15 15:02:47 -08007457 ArraySlice<ArtMethod> virtuals = klass->GetVirtualMethodsSliceUnchecked(pointer_size);
7458 auto is_same_method = [m] (const ArtMethod& meth) {
7459 return &meth == m;
7460 };
Alex Light3f980532017-03-17 15:10:32 -07007461 if (!((super_vtable_length > i && superclass->GetVTableEntry(i, pointer_size) == m) ||
7462 std::find_if(virtuals.begin(), virtuals.end(), is_same_method) != virtuals.end())) {
7463 LOG(WARNING) << m->PrettyMethod() << " does not seem to be owned by current class "
7464 << klass->PrettyClass() << " or any of its superclasses!";
7465 }
Alex Lighte64300b2015-12-15 15:02:47 -08007466 }
7467}
7468
Alex Light1f3925d2016-09-07 12:04:20 -07007469// Check to make sure the vtable does not have duplicates. Duplicates could cause problems when a
7470// method is overridden in a subclass.
Andreas Gampea2fed082019-02-01 09:34:43 -08007471template <PointerSize kPointerSize>
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007472void CheckVTableHasNoDuplicates(Thread* self, Handle<mirror::Class> klass)
Alex Light1f3925d2016-09-07 12:04:20 -07007473 REQUIRES_SHARED(Locks::mutator_lock_) {
7474 StackHandleScope<1> hs(self);
7475 Handle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7476 int32_t num_entries = vtable->GetLength();
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007477
7478 // Observations:
7479 // * The older implementation was O(n^2) and got too expensive for apps with larger classes.
7480 // * Many classes do not override Object functions (e.g., equals/hashCode/toString). Thus,
7481   //    for many classes outside of libcore a cross-dexfile check has to be run anyway.
7482   //  * In the cross-dexfile case, even the O(n^2) approach would need at least O(n) cross-dexfile
7483   //    checks in the best case. It is thus OK in a single-pass algorithm to read all data anyway.
7484 // * The single-pass algorithm will trade memory for speed, but that is OK.
7485
7486 CHECK_GT(num_entries, 0);
7487
7488 auto log_fn = [&vtable, &klass](int32_t i, int32_t j) REQUIRES_SHARED(Locks::mutator_lock_) {
7489 ArtMethod* m1 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(i);
7490 ArtMethod* m2 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
7491 LOG(WARNING) << "vtable entries " << i << " and " << j << " are identical for "
7492 << klass->PrettyClass() << " in method " << m1->PrettyMethod()
7493                  << " (0x" << std::hex << reinterpret_cast<uintptr_t>(m1) << ") and "
7494 << m2->PrettyMethod() << " (0x" << std::hex
7495 << reinterpret_cast<uintptr_t>(m2) << ")";
7496 };
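  // Boost-style hash combine: 0x9e3779b9 is the 32-bit golden-ratio constant, used here to mix
  // the two hash values together.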
7497 struct BaseHashType {
7498 static size_t HashCombine(size_t seed, size_t val) {
7499 return seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2));
7500 }
7501 };
7502
7503 // Check assuming all entries come from the same dex file.
7504 {
7505 // Find the first interesting method and its dex file.
7506 int32_t start = 0;
7507 for (; start < num_entries; ++start) {
7508 ArtMethod* vtable_entry = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start);
7509       // Don't bother if we cannot 'see' the vtable entry (e.g. it might be a package-private
7510       // member).
7511 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7512 vtable_entry->GetAccessFlags())) {
7513 continue;
7514 }
7515 break;
7516 }
7517 if (start == num_entries) {
7518 return;
7519 }
7520 const DexFile* dex_file =
7521 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start)->
7522 GetInterfaceMethodIfProxy(kPointerSize)->GetDexFile();
7523
7524 // Helper function to avoid logging if we have to run the cross-file checks.
7525 auto check_fn = [&](bool log_warn) REQUIRES_SHARED(Locks::mutator_lock_) {
7526 // Use a map to store seen entries, as the storage space is too large for a bitvector.
7527 using PairType = std::pair<uint32_t, uint16_t>;
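      // The pair is (name string index, proto index); within a single dex file this uniquely
      // identifies a method's name and signature.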
7528 struct PairHash : BaseHashType {
7529 size_t operator()(const PairType& key) const {
7530 return BaseHashType::HashCombine(BaseHashType::HashCombine(0, key.first), key.second);
7531 }
7532 };
7533 std::unordered_map<PairType, int32_t, PairHash> seen;
7534 seen.reserve(2 * num_entries);
7535 bool need_slow_path = false;
7536 bool found_dup = false;
7537 for (int i = start; i < num_entries; ++i) {
7538 // Can use Unchecked here as the start loop already ensured that the arrays are correct
7539 // wrt/ kPointerSize.
7540 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7541 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7542 vtable_entry->GetAccessFlags())) {
7543 continue;
7544 }
7545 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7546 if (dex_file != m->GetDexFile()) {
7547 need_slow_path = true;
7548 break;
7549 }
7550 const dex::MethodId* m_mid = &dex_file->GetMethodId(m->GetDexMethodIndex());
7551 PairType pair = std::make_pair(m_mid->name_idx_.index_, m_mid->proto_idx_.index_);
7552 auto it = seen.find(pair);
7553 if (it != seen.end()) {
7554 found_dup = true;
7555 if (log_warn) {
7556 log_fn(it->second, i);
7557 }
7558 } else {
7559 seen.emplace(pair, i);
7560 }
7561 }
7562 return std::make_pair(need_slow_path, found_dup);
7563 };
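    // First pass without logging: if every checked entry comes from the same dex file, we are done
    // (re-running with logging only if a duplicate was found); otherwise fall through to the
    // cross-dex-file check below.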
7564 std::pair<bool, bool> result = check_fn(/* log_warn= */ false);
7565 if (!result.first) {
7566 if (result.second) {
7567 check_fn(/* log_warn= */ true);
7568 }
7569 return;
7570 }
7571 }
7572
7573 // Need to check across dex files.
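  // Here methods are compared by name and signature contents rather than dex indices; each Entry
  // caches a combined hash so repeated map lookups stay cheap.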
7574 struct Entry {
7575 size_t cached_hash = 0;
7576 const char* name = nullptr;
7577 Signature signature = Signature::NoSignature();
7578 uint32_t name_len = 0;
7579
7580 Entry(const DexFile* dex_file, const dex::MethodId& mid)
7581 : name(dex_file->StringDataAndUtf16LengthByIdx(mid.name_idx_, &name_len)),
7582 signature(dex_file->GetMethodSignature(mid)) {
7583 }
7584
7585 bool operator==(const Entry& other) const {
7586 if (name_len != other.name_len || strcmp(name, other.name) != 0) {
7587 return false;
7588 }
7589 return signature == other.signature;
7590 }
7591 };
7592 struct EntryHash {
7593 size_t operator()(const Entry& key) const {
7594 return key.cached_hash;
7595 }
7596 };
7597 std::unordered_map<Entry, int32_t, EntryHash> map;
7598 for (int32_t i = 0; i < num_entries; ++i) {
7599 // Can use Unchecked here as the first loop already ensured that the arrays are correct
7600 // wrt/ kPointerSize.
7601 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7602     // Don't bother if we cannot 'see' the vtable entry (e.g. it might be a package-private
7603     // member).
Alex Light1f3925d2016-09-07 12:04:20 -07007604 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7605 vtable_entry->GetAccessFlags())) {
7606 continue;
7607 }
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007608 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7609 const DexFile* dex_file = m->GetDexFile();
7610 const dex::MethodId& mid = dex_file->GetMethodId(m->GetDexMethodIndex());
7611
7612 Entry e(dex_file, mid);
7613
7614 size_t string_hash = std::hash<std::string_view>()(std::string_view(e.name, e.name_len));
7615 size_t sig_hash = std::hash<std::string>()(e.signature.ToString());
7616 e.cached_hash = BaseHashType::HashCombine(BaseHashType::HashCombine(0u, string_hash),
7617 sig_hash);
7618
7619 auto it = map.find(e);
7620 if (it != map.end()) {
7621 log_fn(it->second, i);
7622 } else {
7623 map.emplace(e, i);
Alex Light1f3925d2016-09-07 12:04:20 -07007624 }
7625 }
7626}
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007627
7628void CheckVTableHasNoDuplicates(Thread* self,
7629 Handle<mirror::Class> klass,
7630 PointerSize pointer_size)
Andreas Gampea2fed082019-02-01 09:34:43 -08007631 REQUIRES_SHARED(Locks::mutator_lock_) {
7632 switch (pointer_size) {
7633 case PointerSize::k64:
7634 CheckVTableHasNoDuplicates<PointerSize::k64>(self, klass);
7635 break;
7636 case PointerSize::k32:
7637 CheckVTableHasNoDuplicates<PointerSize::k32>(self, klass);
7638 break;
7639 }
7640}
Alex Light1f3925d2016-09-07 12:04:20 -07007641
7642static void SanityCheckVTable(Thread* self, Handle<mirror::Class> klass, PointerSize pointer_size)
7643 REQUIRES_SHARED(Locks::mutator_lock_) {
7644 CheckClassOwnsVTableEntries(self, klass, pointer_size);
7645 CheckVTableHasNoDuplicates(self, klass, pointer_size);
7646}
7647
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007648} // namespace
7649
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007650void ClassLinker::FillImtFromSuperClass(Handle<mirror::Class> klass,
7651 ArtMethod* unimplemented_method,
7652 ArtMethod* imt_conflict_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007653 bool* new_conflict,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007654 ArtMethod** imt) {
Alex Light705ad492015-09-21 11:36:30 -07007655 DCHECK(klass->HasSuperClass());
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007656 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007657 if (super_class->ShouldHaveImt()) {
7658 ImTable* super_imt = super_class->GetImt(image_pointer_size_);
7659 for (size_t i = 0; i < ImTable::kSize; ++i) {
7660 imt[i] = super_imt->Get(i, image_pointer_size_);
Alex Light705ad492015-09-21 11:36:30 -07007661 }
7662 } else {
7663 // No imt in the super class, need to reconstruct from the iftable.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007664 ObjPtr<mirror::IfTable> if_table = super_class->GetIfTable();
Mathieu Chartier6beced42016-11-15 15:51:31 -08007665 if (if_table->Count() != 0) {
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007666 // Ignore copied methods since we will handle these in LinkInterfaceMethods.
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007667 FillIMTFromIfTable(if_table,
7668 unimplemented_method,
7669 imt_conflict_method,
7670 klass.Get(),
Andreas Gampe98ea9d92018-10-19 14:06:15 -07007671 /*create_conflict_tables=*/false,
7672 /*ignore_copied_methods=*/true,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007673 /*out*/new_conflict,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007674 /*out*/imt);
Alex Light705ad492015-09-21 11:36:30 -07007675 }
7676 }
7677}
7678
Vladimir Marko921094a2017-01-12 18:37:06 +00007679class ClassLinker::LinkInterfaceMethodsHelper {
7680 public:
7681 LinkInterfaceMethodsHelper(ClassLinker* class_linker,
7682 Handle<mirror::Class> klass,
7683 Thread* self,
7684 Runtime* runtime)
7685 : class_linker_(class_linker),
7686 klass_(klass),
7687 method_alignment_(ArtMethod::Alignment(class_linker->GetImagePointerSize())),
7688 method_size_(ArtMethod::Size(class_linker->GetImagePointerSize())),
7689 self_(self),
7690 stack_(runtime->GetLinearAlloc()->GetArenaPool()),
7691 allocator_(&stack_),
7692 default_conflict_methods_(allocator_.Adapter()),
7693 overriding_default_conflict_methods_(allocator_.Adapter()),
7694 miranda_methods_(allocator_.Adapter()),
7695 default_methods_(allocator_.Adapter()),
7696 overriding_default_methods_(allocator_.Adapter()),
7697 move_table_(allocator_.Adapter()) {
7698 }
7699
7700 ArtMethod* FindMethod(ArtMethod* interface_method,
7701 MethodNameAndSignatureComparator& interface_name_comparator,
7702 ArtMethod* vtable_impl)
7703 REQUIRES_SHARED(Locks::mutator_lock_);
7704
7705 ArtMethod* GetOrCreateMirandaMethod(ArtMethod* interface_method,
7706 MethodNameAndSignatureComparator& interface_name_comparator)
7707 REQUIRES_SHARED(Locks::mutator_lock_);
7708
7709 bool HasNewVirtuals() const {
7710 return !(miranda_methods_.empty() &&
7711 default_methods_.empty() &&
7712 overriding_default_methods_.empty() &&
7713 overriding_default_conflict_methods_.empty() &&
7714 default_conflict_methods_.empty());
7715 }
7716
7717 void ReallocMethods() REQUIRES_SHARED(Locks::mutator_lock_);
7718
7719 ObjPtr<mirror::PointerArray> UpdateVtable(
7720 const std::unordered_map<size_t, ClassLinker::MethodTranslation>& default_translations,
Vladimir Marko3068d582019-05-28 16:39:29 +01007721 Handle<mirror::PointerArray> old_vtable) REQUIRES_SHARED(Locks::mutator_lock_);
Vladimir Marko921094a2017-01-12 18:37:06 +00007722
7723 void UpdateIfTable(Handle<mirror::IfTable> iftable) REQUIRES_SHARED(Locks::mutator_lock_);
7724
7725 void UpdateIMT(ArtMethod** out_imt);
7726
7727 void CheckNoStaleMethodsInDexCache() REQUIRES_SHARED(Locks::mutator_lock_) {
7728 if (kIsDebugBuild) {
7729 PointerSize pointer_size = class_linker_->GetImagePointerSize();
7730 // Check that there are no stale methods are in the dex cache array.
7731 auto* resolved_methods = klass_->GetDexCache()->GetResolvedMethods();
7732 for (size_t i = 0, count = klass_->GetDexCache()->NumResolvedMethods(); i < count; ++i) {
Vladimir Marko07bfbac2017-07-06 14:55:02 +01007733 auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_methods, i, pointer_size);
7734 ArtMethod* m = pair.object;
Vladimir Marko921094a2017-01-12 18:37:06 +00007735 CHECK(move_table_.find(m) == move_table_.end() ||
7736 // The original versions of copied methods will still be present so allow those too.
7737 // Note that if the first check passes this might fail to GetDeclaringClass().
7738 std::find_if(m->GetDeclaringClass()->GetMethods(pointer_size).begin(),
7739 m->GetDeclaringClass()->GetMethods(pointer_size).end(),
7740 [m] (ArtMethod& meth) {
7741 return &meth == m;
7742 }) != m->GetDeclaringClass()->GetMethods(pointer_size).end())
7743 << "Obsolete method " << m->PrettyMethod() << " is in dex cache!";
7744 }
7745 }
7746 }
7747
7748 void ClobberOldMethods(LengthPrefixedArray<ArtMethod>* old_methods,
7749 LengthPrefixedArray<ArtMethod>* methods) {
7750 if (kIsDebugBuild) {
7751 CHECK(methods != nullptr);
7752 // Put some random garbage in old methods to help find stale pointers.
7753 if (methods != old_methods && old_methods != nullptr) {
7754 // Need to make sure the GC is not running since it could be scanning the methods we are
7755 // about to overwrite.
7756 ScopedThreadStateChange tsc(self_, kSuspended);
7757 gc::ScopedGCCriticalSection gcs(self_,
7758 gc::kGcCauseClassLinker,
7759 gc::kCollectorTypeClassLinker);
7760 const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_methods->size(),
7761 method_size_,
7762 method_alignment_);
7763 memset(old_methods, 0xFEu, old_size);
7764 }
7765 }
7766 }
7767
7768 private:
7769 size_t NumberOfNewVirtuals() const {
7770 return miranda_methods_.size() +
7771 default_methods_.size() +
7772 overriding_default_conflict_methods_.size() +
7773 overriding_default_methods_.size() +
7774 default_conflict_methods_.size();
7775 }
7776
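  // Whether the vtable/iftable method arrays need to be filled in: interfaces have no vtables, so
  // for them we only copy conflict methods.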
7777 bool FillTables() REQUIRES_SHARED(Locks::mutator_lock_) {
7778 return !klass_->IsInterface();
7779 }
7780
7781 void LogNewVirtuals() const REQUIRES_SHARED(Locks::mutator_lock_) {
7782 DCHECK(!klass_->IsInterface() || (default_methods_.empty() && miranda_methods_.empty()))
7783 << "Interfaces should only have default-conflict methods appended to them.";
7784 VLOG(class_linker) << mirror::Class::PrettyClass(klass_.Get()) << ": miranda_methods="
7785 << miranda_methods_.size()
7786 << " default_methods=" << default_methods_.size()
7787 << " overriding_default_methods=" << overriding_default_methods_.size()
7788 << " default_conflict_methods=" << default_conflict_methods_.size()
7789 << " overriding_default_conflict_methods="
7790 << overriding_default_conflict_methods_.size();
7791 }
7792
7793 ClassLinker* class_linker_;
7794 Handle<mirror::Class> klass_;
7795 size_t method_alignment_;
7796 size_t method_size_;
7797 Thread* const self_;
7798
7799   // These are allocated on the heap to begin with; we then transfer them to the linear alloc when
7800   // we re-create the virtual methods array.
7801   // Need to use low 4GB arenas for the compiler or else the pointers won't fit in the 32-bit
7802   // method array during cross compilation.
7803   // Use the linear alloc pool since this one is in the low 4GB for the compiler.
7804 ArenaStack stack_;
7805 ScopedArenaAllocator allocator_;
7806
7807 ScopedArenaVector<ArtMethod*> default_conflict_methods_;
7808 ScopedArenaVector<ArtMethod*> overriding_default_conflict_methods_;
7809 ScopedArenaVector<ArtMethod*> miranda_methods_;
7810 ScopedArenaVector<ArtMethod*> default_methods_;
7811 ScopedArenaVector<ArtMethod*> overriding_default_methods_;
7812
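  // Maps each original ArtMethod* to its relocated copy created by ReallocMethods(); used later to
  // patch stale pointers in the vtable, iftable and IMT.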
7813 ScopedArenaUnorderedMap<ArtMethod*, ArtMethod*> move_table_;
7814};
7815
7816ArtMethod* ClassLinker::LinkInterfaceMethodsHelper::FindMethod(
7817 ArtMethod* interface_method,
7818 MethodNameAndSignatureComparator& interface_name_comparator,
7819 ArtMethod* vtable_impl) {
7820 ArtMethod* current_method = nullptr;
7821 switch (class_linker_->FindDefaultMethodImplementation(self_,
7822 interface_method,
7823 klass_,
7824 /*out*/&current_method)) {
7825 case DefaultMethodSearchResult::kDefaultConflict: {
7826 // Default method conflict.
7827 DCHECK(current_method == nullptr);
7828 ArtMethod* default_conflict_method = nullptr;
7829 if (vtable_impl != nullptr && vtable_impl->IsDefaultConflicting()) {
7830 // We can reuse the method from the superclass, don't bother adding it to virtuals.
7831 default_conflict_method = vtable_impl;
7832 } else {
7833 // See if we already have a conflict method for this method.
7834 ArtMethod* preexisting_conflict = FindSameNameAndSignature(
7835 interface_name_comparator,
7836 default_conflict_methods_,
7837 overriding_default_conflict_methods_);
7838 if (LIKELY(preexisting_conflict != nullptr)) {
7839 // We already have another conflict we can reuse.
7840 default_conflict_method = preexisting_conflict;
7841 } else {
7842           // Note that we do this even if we are an interface, since we need to create this
7843           // conflict method and cannot reuse another class's.
7844 // Create a new conflict method for this to use.
7845 default_conflict_method = reinterpret_cast<ArtMethod*>(allocator_.Alloc(method_size_));
7846 new(default_conflict_method) ArtMethod(interface_method,
7847 class_linker_->GetImagePointerSize());
7848 if (vtable_impl == nullptr) {
7849 // Save the conflict method. We need to add it to the vtable.
7850 default_conflict_methods_.push_back(default_conflict_method);
7851 } else {
7852 // Save the conflict method but it is already in the vtable.
7853 overriding_default_conflict_methods_.push_back(default_conflict_method);
7854 }
7855 }
7856 }
7857 current_method = default_conflict_method;
7858 break;
7859 } // case kDefaultConflict
7860 case DefaultMethodSearchResult::kDefaultFound: {
7861 DCHECK(current_method != nullptr);
7862 // Found a default method.
7863 if (vtable_impl != nullptr &&
7864 current_method->GetDeclaringClass() == vtable_impl->GetDeclaringClass()) {
7865 // We found a default method but it was the same one we already have from our
7866 // superclass. Don't bother adding it to our vtable again.
7867 current_method = vtable_impl;
7868 } else if (LIKELY(FillTables())) {
7869 // Interfaces don't need to copy default methods since they don't have vtables.
7870 // Only record this default method if it is new to save space.
7871 // TODO It might be worthwhile to copy default methods on interfaces anyway since it
7872 // would make lookup for interface super much faster. (We would only need to scan
7873 // the iftable to find if there is a NSME or AME.)
7874 ArtMethod* old = FindSameNameAndSignature(interface_name_comparator,
7875 default_methods_,
7876 overriding_default_methods_);
7877 if (old == nullptr) {
7878 // We found a default method implementation and there were no conflicts.
7879 if (vtable_impl == nullptr) {
7880 // Save the default method. We need to add it to the vtable.
7881 default_methods_.push_back(current_method);
7882 } else {
7883 // Save the default method but it is already in the vtable.
7884 overriding_default_methods_.push_back(current_method);
7885 }
7886 } else {
7887 CHECK(old == current_method) << "Multiple default implementations selected!";
7888 }
7889 }
7890 break;
7891 } // case kDefaultFound
7892 case DefaultMethodSearchResult::kAbstractFound: {
7893 DCHECK(current_method == nullptr);
7894 // Abstract method masks all defaults.
7895 if (vtable_impl != nullptr &&
7896 vtable_impl->IsAbstract() &&
7897 !vtable_impl->IsDefaultConflicting()) {
7898 // We need to make this an abstract method but the version in the vtable already is so
7899 // don't do anything.
7900 current_method = vtable_impl;
7901 }
7902 break;
7903 } // case kAbstractFound
7904 }
7905 return current_method;
7906}
7907
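// A miranda method is a placeholder ArtMethod copied into the class for an abstract interface
// method that has no implementation, so that vtable and iftable slots always have a concrete
// ArtMethod* to point at.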
7908ArtMethod* ClassLinker::LinkInterfaceMethodsHelper::GetOrCreateMirandaMethod(
7909 ArtMethod* interface_method,
7910 MethodNameAndSignatureComparator& interface_name_comparator) {
7911 // Find out if there is already a miranda method we can use.
7912 ArtMethod* miranda_method = FindSameNameAndSignature(interface_name_comparator,
7913 miranda_methods_);
7914 if (miranda_method == nullptr) {
7915 DCHECK(interface_method->IsAbstract()) << interface_method->PrettyMethod();
7916 miranda_method = reinterpret_cast<ArtMethod*>(allocator_.Alloc(method_size_));
7917 CHECK(miranda_method != nullptr);
7918 // Point the interface table at a phantom slot.
7919 new(miranda_method) ArtMethod(interface_method, class_linker_->GetImagePointerSize());
7920 miranda_methods_.push_back(miranda_method);
7921 }
7922 return miranda_method;
7923}
7924
7925void ClassLinker::LinkInterfaceMethodsHelper::ReallocMethods() {
7926 LogNewVirtuals();
7927
7928 const size_t old_method_count = klass_->NumMethods();
7929 const size_t new_method_count = old_method_count + NumberOfNewVirtuals();
7930 DCHECK_NE(old_method_count, new_method_count);
7931
7932 // Attempt to realloc to save RAM if possible.
7933 LengthPrefixedArray<ArtMethod>* old_methods = klass_->GetMethodsPtr();
7934 // The Realloced virtual methods aren't visible from the class roots, so there is no issue
7935 // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
7936 // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since
7937 // CopyFrom has internal read barriers.
7938 //
7939 // TODO We should maybe move some of this into mirror::Class or at least into another method.
7940 const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count,
7941 method_size_,
7942 method_alignment_);
7943 const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
7944 method_size_,
7945 method_alignment_);
7946 const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
7947 auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
Nicolas Geoffray48b40cc2017-08-07 16:52:40 +01007948 class_linker_->GetAllocatorForClassLoader(klass_->GetClassLoader())->Realloc(
Vladimir Marko921094a2017-01-12 18:37:06 +00007949 self_, old_methods, old_methods_ptr_size, new_size));
7950 CHECK(methods != nullptr); // Native allocation failure aborts.
7951
7952 PointerSize pointer_size = class_linker_->GetImagePointerSize();
7953 if (methods != old_methods) {
7954 // Maps from heap allocated miranda method to linear alloc miranda method.
7955 StrideIterator<ArtMethod> out = methods->begin(method_size_, method_alignment_);
7956 // Copy over the old methods.
7957 for (auto& m : klass_->GetMethods(pointer_size)) {
7958 move_table_.emplace(&m, &*out);
7959 // The CopyFrom is only necessary to not miss read barriers since Realloc won't do read
7960 // barriers when it copies.
7961 out->CopyFrom(&m, pointer_size);
7962 ++out;
7963 }
7964 }
7965 StrideIterator<ArtMethod> out(methods->begin(method_size_, method_alignment_) + old_method_count);
7966 // Copy over miranda methods before copying vtable since CopyOf may cause thread suspension and
7967 // we want the roots of the miranda methods to get visited.
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00007968 for (size_t i = 0; i < miranda_methods_.size(); ++i) {
7969 ArtMethod* mir_method = miranda_methods_[i];
Vladimir Marko921094a2017-01-12 18:37:06 +00007970 ArtMethod& new_method = *out;
7971 new_method.CopyFrom(mir_method, pointer_size);
7972 new_method.SetAccessFlags(new_method.GetAccessFlags() | kAccMiranda | kAccCopied);
7973 DCHECK_NE(new_method.GetAccessFlags() & kAccAbstract, 0u)
7974 << "Miranda method should be abstract!";
7975 move_table_.emplace(mir_method, &new_method);
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00007976 // Update the entry in the method array, as the array will be used for future lookups,
7977 // where thread suspension is allowed.
7978 // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
7979 // would not see them.
7980 miranda_methods_[i] = &new_method;
Vladimir Marko921094a2017-01-12 18:37:06 +00007981 ++out;
7982 }
7983 // We need to copy the default methods into our own method table since the runtime requires that
7984 // every method on a class's vtable be in that respective class's virtual method table.
7985 // NOTE This means that two classes might have the same implementation of a method from the same
7986 // interface but will have different ArtMethod*s for them. This also means we cannot compare a
7987 // default method found on a class with one found on the declaring interface directly and must
7988 // look at the declaring class to determine if they are the same.
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00007989 for (ScopedArenaVector<ArtMethod*>* methods_vec : {&default_methods_,
7990 &overriding_default_methods_}) {
7991 for (size_t i = 0; i < methods_vec->size(); ++i) {
7992 ArtMethod* def_method = (*methods_vec)[i];
Vladimir Marko921094a2017-01-12 18:37:06 +00007993 ArtMethod& new_method = *out;
7994 new_method.CopyFrom(def_method, pointer_size);
7995 // Clear the kAccSkipAccessChecks flag if it is present. Since this class hasn't been
7996 // verified yet it shouldn't have methods that are skipping access checks.
7997 // TODO This is rather arbitrary. We should maybe support classes where only some of its
7998 // methods are skip_access_checks.
Vladimir Markob0a6aee2017-10-27 10:34:04 +01007999 DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
Vladimir Marko921094a2017-01-12 18:37:06 +00008000 constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
8001 constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
8002 new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
8003 move_table_.emplace(def_method, &new_method);
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008004 // Update the entry in the method array, as the array will be used for future lookups,
8005 // where thread suspension is allowed.
8006 // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
8007 // would not see them.
8008 (*methods_vec)[i] = &new_method;
Vladimir Marko921094a2017-01-12 18:37:06 +00008009 ++out;
8010 }
8011 }
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008012 for (ScopedArenaVector<ArtMethod*>* methods_vec : {&default_conflict_methods_,
8013 &overriding_default_conflict_methods_}) {
8014 for (size_t i = 0; i < methods_vec->size(); ++i) {
8015 ArtMethod* conf_method = (*methods_vec)[i];
Vladimir Marko921094a2017-01-12 18:37:06 +00008016 ArtMethod& new_method = *out;
8017 new_method.CopyFrom(conf_method, pointer_size);
8018 // This is a type of default method (there are default method impls, just a conflict) so
8019       // mark this as a default, non-abstract method, since that's what it is. Also clear the
8020       // kAccSkipAccessChecks bit; since this class hasn't been verified yet, it shouldn't have
8021 // methods that are skipping access checks.
Nicolas Geoffray7aca9d52018-09-07 11:13:33 +01008022 // Also clear potential kAccSingleImplementation to avoid CHA trying to inline
8023 // the default method.
Vladimir Markob0a6aee2017-10-27 10:34:04 +01008024 DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
Vladimir Marko921094a2017-01-12 18:37:06 +00008025 constexpr uint32_t kSetFlags = kAccDefault | kAccDefaultConflict | kAccCopied;
Nicolas Geoffray7aca9d52018-09-07 11:13:33 +01008026 constexpr uint32_t kMaskFlags =
8027 ~(kAccAbstract | kAccSkipAccessChecks | kAccSingleImplementation);
Vladimir Marko921094a2017-01-12 18:37:06 +00008028 new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
8029 DCHECK(new_method.IsDefaultConflicting());
8030 // The actual method might or might not be marked abstract since we just copied it from a
8031 // (possibly default) interface method. We need to set it entry point to be the bridge so
8032 // that the compiler will not invoke the implementation of whatever method we copied from.
8033 EnsureThrowsInvocationError(class_linker_, &new_method);
8034 move_table_.emplace(conf_method, &new_method);
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008035 // Update the entry in the method array, as the array will be used for future lookups,
8036 // where thread suspension is allowed.
8037 // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
8038 // would not see them.
8039 (*methods_vec)[i] = &new_method;
Vladimir Marko921094a2017-01-12 18:37:06 +00008040 ++out;
8041 }
8042 }
8043 methods->SetSize(new_method_count);
8044 class_linker_->UpdateClassMethods(klass_.Get(), methods);
8045}
8046
8047ObjPtr<mirror::PointerArray> ClassLinker::LinkInterfaceMethodsHelper::UpdateVtable(
8048 const std::unordered_map<size_t, ClassLinker::MethodTranslation>& default_translations,
Vladimir Marko3068d582019-05-28 16:39:29 +01008049 Handle<mirror::PointerArray> old_vtable) {
Vladimir Marko921094a2017-01-12 18:37:06 +00008050 // Update the vtable to the new method structures. We can skip this for interfaces since they
8051 // do not have vtables.
8052 const size_t old_vtable_count = old_vtable->GetLength();
8053 const size_t new_vtable_count = old_vtable_count +
8054 miranda_methods_.size() +
8055 default_methods_.size() +
8056 default_conflict_methods_.size();
8057
Vladimir Marko3068d582019-05-28 16:39:29 +01008058 ObjPtr<mirror::PointerArray> vtable = ObjPtr<mirror::PointerArray>::DownCast(
8059 mirror::Array::CopyOf(old_vtable, self_, new_vtable_count));
Vladimir Marko921094a2017-01-12 18:37:06 +00008060 if (UNLIKELY(vtable == nullptr)) {
8061 self_->AssertPendingOOMException();
8062 return nullptr;
8063 }
8064
8065 size_t vtable_pos = old_vtable_count;
8066 PointerSize pointer_size = class_linker_->GetImagePointerSize();
8067 // Update all the newly copied method's indexes so they denote their placement in the vtable.
8068 for (const ScopedArenaVector<ArtMethod*>& methods_vec : {default_methods_,
8069 default_conflict_methods_,
8070 miranda_methods_}) {
8071 // These are the functions that are not already in the vtable!
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008072 for (ArtMethod* new_vtable_method : methods_vec) {
Vladimir Marko921094a2017-01-12 18:37:06 +00008073       // Leave the declaring class alone; the method's dex_code_item_offset_ and dex_method_index_
8074 // fields are references into the dex file the method was defined in. Since the ArtMethod
8075 // does not store that information it uses declaring_class_->dex_cache_.
8076 new_vtable_method->SetMethodIndex(0xFFFF & vtable_pos);
8077 vtable->SetElementPtrSize(vtable_pos, new_vtable_method, pointer_size);
8078 ++vtable_pos;
8079 }
8080 }
8081 DCHECK_EQ(vtable_pos, new_vtable_count);
8082
8083 // Update old vtable methods. We use the default_translations map to figure out what each
8084 // vtable entry should be updated to, if they need to be at all.
8085 for (size_t i = 0; i < old_vtable_count; ++i) {
8086 ArtMethod* translated_method = vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
8087 // Try and find what we need to change this method to.
8088 auto translation_it = default_translations.find(i);
Vladimir Marko921094a2017-01-12 18:37:06 +00008089 if (translation_it != default_translations.end()) {
8090 if (translation_it->second.IsInConflict()) {
8091 // Find which conflict method we are to use for this method.
8092 MethodNameAndSignatureComparator old_method_comparator(
8093 translated_method->GetInterfaceMethodIfProxy(pointer_size));
8094 // We only need to look through overriding_default_conflict_methods since this is an
8095 // overridden method we are fixing up here.
8096 ArtMethod* new_conflict_method = FindSameNameAndSignature(
8097 old_method_comparator, overriding_default_conflict_methods_);
8098 CHECK(new_conflict_method != nullptr) << "Expected a conflict method!";
8099 translated_method = new_conflict_method;
8100 } else if (translation_it->second.IsAbstract()) {
8101 // Find which miranda method we are to use for this method.
8102 MethodNameAndSignatureComparator old_method_comparator(
8103 translated_method->GetInterfaceMethodIfProxy(pointer_size));
8104 ArtMethod* miranda_method = FindSameNameAndSignature(old_method_comparator,
8105 miranda_methods_);
8106 DCHECK(miranda_method != nullptr);
8107 translated_method = miranda_method;
8108 } else {
8109 // Normal default method (changed from an older default or abstract interface method).
8110 DCHECK(translation_it->second.IsTranslation());
8111 translated_method = translation_it->second.GetTranslation();
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008112 auto it = move_table_.find(translated_method);
8113 DCHECK(it != move_table_.end());
8114 translated_method = it->second;
Vladimir Marko921094a2017-01-12 18:37:06 +00008115 }
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008116 } else {
8117 auto it = move_table_.find(translated_method);
8118 translated_method = (it != move_table_.end()) ? it->second : nullptr;
Vladimir Marko921094a2017-01-12 18:37:06 +00008119 }
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008120
8121 if (translated_method != nullptr) {
Vladimir Marko921094a2017-01-12 18:37:06 +00008122 // Make sure the new_methods index is set.
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008123 if (translated_method->GetMethodIndexDuringLinking() != i) {
Vladimir Marko921094a2017-01-12 18:37:06 +00008124 if (kIsDebugBuild) {
8125 auto* methods = klass_->GetMethodsPtr();
8126 CHECK_LE(reinterpret_cast<uintptr_t>(&*methods->begin(method_size_, method_alignment_)),
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008127 reinterpret_cast<uintptr_t>(translated_method));
8128 CHECK_LT(reinterpret_cast<uintptr_t>(translated_method),
Vladimir Marko921094a2017-01-12 18:37:06 +00008129 reinterpret_cast<uintptr_t>(&*methods->end(method_size_, method_alignment_)));
8130 }
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008131 translated_method->SetMethodIndex(0xFFFF & i);
Vladimir Marko921094a2017-01-12 18:37:06 +00008132 }
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008133 vtable->SetElementPtrSize(i, translated_method, pointer_size);
Vladimir Marko921094a2017-01-12 18:37:06 +00008134 }
8135 }
Vladimir Markod93e3742018-07-18 10:58:13 +01008136 klass_->SetVTable(vtable);
Vladimir Marko921094a2017-01-12 18:37:06 +00008137 return vtable;
8138}
8139
8140void ClassLinker::LinkInterfaceMethodsHelper::UpdateIfTable(Handle<mirror::IfTable> iftable) {
8141 PointerSize pointer_size = class_linker_->GetImagePointerSize();
8142 const size_t ifcount = klass_->GetIfTableCount();
8143 // Go fix up all the stale iftable pointers.
8144 for (size_t i = 0; i < ifcount; ++i) {
8145 for (size_t j = 0, count = iftable->GetMethodArrayCount(i); j < count; ++j) {
Vladimir Marko557fece2019-03-26 14:29:41 +00008146 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArray(i);
8147 ArtMethod* m = method_array->GetElementPtrSize<ArtMethod*>(j, pointer_size);
Vladimir Marko921094a2017-01-12 18:37:06 +00008148 DCHECK(m != nullptr) << klass_->PrettyClass();
8149 auto it = move_table_.find(m);
8150 if (it != move_table_.end()) {
8151 auto* new_m = it->second;
8152 DCHECK(new_m != nullptr) << klass_->PrettyClass();
8153 method_array->SetElementPtrSize(j, new_m, pointer_size);
8154 }
8155 }
8156 }
8157}
8158
8159void ClassLinker::LinkInterfaceMethodsHelper::UpdateIMT(ArtMethod** out_imt) {
8160 // Fix up IMT next.
8161 for (size_t i = 0; i < ImTable::kSize; ++i) {
8162 auto it = move_table_.find(out_imt[i]);
8163 if (it != move_table_.end()) {
8164 out_imt[i] = it->second;
8165 }
8166 }
8167}
8168
Alex Light705ad492015-09-21 11:36:30 -07008169// TODO This method needs to be split up into several smaller methods.
Alex Lighteb7c1442015-08-31 13:17:42 -07008170bool ClassLinker::LinkInterfaceMethods(
8171 Thread* self,
8172 Handle<mirror::Class> klass,
Alex Light9139e002015-10-09 15:59:48 -07008173 const std::unordered_map<size_t, ClassLinker::MethodTranslation>& default_translations,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00008174 bool* out_new_conflict,
Alex Lighteb7c1442015-08-31 13:17:42 -07008175 ArtMethod** out_imt) {
8176 StackHandleScope<3> hs(self);
8177 Runtime* const runtime = Runtime::Current();
Alex Light705ad492015-09-21 11:36:30 -07008178
8179 const bool is_interface = klass->IsInterface();
Alex Lighteb7c1442015-08-31 13:17:42 -07008180 const bool has_superclass = klass->HasSuperClass();
Alex Light705ad492015-09-21 11:36:30 -07008181 const bool fill_tables = !is_interface;
Alex Lighteb7c1442015-08-31 13:17:42 -07008182 const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
Alex Lighteb7c1442015-08-31 13:17:42 -07008183 const size_t ifcount = klass->GetIfTableCount();
8184
Vladimir Marko921094a2017-01-12 18:37:06 +00008185 Handle<mirror::IfTable> iftable(hs.NewHandle(klass->GetIfTable()));
Mathieu Chartiere401d142015-04-22 13:56:20 -07008186
8187 MutableHandle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
8188 ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
Alex Light9139e002015-10-09 15:59:48 -07008189 ArtMethod* const imt_conflict_method = runtime->GetImtConflictMethod();
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07008190 // Copy the IMT from the super class if possible.
Alex Light705ad492015-09-21 11:36:30 -07008191 const bool extend_super_iftable = has_superclass;
8192 if (has_superclass && fill_tables) {
8193 FillImtFromSuperClass(klass,
Alex Light705ad492015-09-21 11:36:30 -07008194 unimplemented_method,
8195 imt_conflict_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00008196 out_new_conflict,
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07008197 out_imt);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07008198 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07008199   // Allocate method arrays beforehand since we don't want to miss visiting miranda method roots due to
8200 // thread suspension.
Alex Light705ad492015-09-21 11:36:30 -07008201 if (fill_tables) {
Vladimir Marko921094a2017-01-12 18:37:06 +00008202 if (!AllocateIfTableMethodArrays(self, klass, iftable)) {
8203 return false;
Mathieu Chartiere401d142015-04-22 13:56:20 -07008204 }
8205 }
8206
Vladimir Marko921094a2017-01-12 18:37:06 +00008207 LinkInterfaceMethodsHelper helper(this, klass, self, runtime);
8208
Igor Murashkinb1d8c312015-08-04 11:18:43 -07008209 auto* old_cause = self->StartAssertNoThreadSuspension(
Mathieu Chartiere401d142015-04-22 13:56:20 -07008210 "Copying ArtMethods for LinkInterfaceMethods");
Alex Light9139e002015-10-09 15:59:48 -07008211 // Going in reverse to ensure that we will hit abstract methods that override defaults before the
8212 // defaults. This means we don't need to do any trickery when creating the Miranda methods, since
8213 // they will already be null. This has the additional benefit that the declarer of a miranda
8214 // method will actually declare an abstract method.
Vladimir Markoba118822017-06-12 15:41:56 +01008215 for (size_t i = ifcount; i != 0u; ) {
Alex Light9139e002015-10-09 15:59:48 -07008216 --i;
Alex Light9139e002015-10-09 15:59:48 -07008217 DCHECK_LT(i, ifcount);
8218
Alex Light705ad492015-09-21 11:36:30 -07008219 size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
Mathieu Chartiere401d142015-04-22 13:56:20 -07008220 if (num_methods > 0) {
8221 StackHandleScope<2> hs2(self);
8222 const bool is_super = i < super_ifcount;
8223 const bool super_interface = is_super && extend_super_iftable;
Alex Light705ad492015-09-21 11:36:30 -07008224       // We don't actually create or fill these tables for interfaces; we just copy some methods to
8225       // create conflict methods. Just set this to nullptr in those cases.
8226 Handle<mirror::PointerArray> method_array(fill_tables
8227 ? hs2.NewHandle(iftable->GetMethodArray(i))
8228 : hs2.NewHandle<mirror::PointerArray>(nullptr));
Mathieu Chartiere401d142015-04-22 13:56:20 -07008229
Alex Lighte64300b2015-12-15 15:02:47 -08008230 ArraySlice<ArtMethod> input_virtual_methods;
Mathieu Chartier9865bde2015-12-21 09:58:16 -08008231 ScopedNullHandle<mirror::PointerArray> null_handle;
8232 Handle<mirror::PointerArray> input_vtable_array(null_handle);
Mathieu Chartiere401d142015-04-22 13:56:20 -07008233 int32_t input_array_length = 0;
Alex Lighte64300b2015-12-15 15:02:47 -08008234
Alex Light9139e002015-10-09 15:59:48 -07008235 // TODO Cleanup Needed: In the presence of default methods this optimization is rather dirty
8236 // and confusing. Default methods should always look through all the superclasses
8237 // because they are the last choice of an implementation. We get around this by looking
8238 // at the super-classes iftable methods (copied into method_array previously) when we are
8239 // looking for the implementation of a super-interface method but that is rather dirty.
Alex Lighte64300b2015-12-15 15:02:47 -08008240 bool using_virtuals;
Alex Light705ad492015-09-21 11:36:30 -07008241 if (super_interface || is_interface) {
Alex Lighte64300b2015-12-15 15:02:47 -08008242 // If we are overwriting a super class interface, try to use only the virtual methods instead of the
Mathieu Chartiere401d142015-04-22 13:56:20 -07008243 // whole vtable.
Alex Lighte64300b2015-12-15 15:02:47 -08008244 using_virtuals = true;
Alex Lighta467a6e2020-03-23 16:07:29 -07008245 input_virtual_methods = klass->GetDeclaredVirtualMethodsSlice(image_pointer_size_);
Alex Lighte64300b2015-12-15 15:02:47 -08008246 input_array_length = input_virtual_methods.size();
Mathieu Chartiere401d142015-04-22 13:56:20 -07008247 } else {
Alex Lighte64300b2015-12-15 15:02:47 -08008248 // For a new interface, however, we need the whole vtable in case a new
8249 // interface method is implemented anywhere in the superclass hierarchy.
8250 using_virtuals = false;
Andreas Gampefa4333d2017-02-14 11:10:34 -08008251 DCHECK(vtable != nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07008252 input_vtable_array = vtable;
8253 input_array_length = input_vtable_array->GetLength();
8254 }
Alex Lighte64300b2015-12-15 15:02:47 -08008255
Alex Lighteb7c1442015-08-31 13:17:42 -07008256 // For each method in interface
Ian Rogers62d6c772013-02-27 08:32:07 -08008257 for (size_t j = 0; j < num_methods; ++j) {
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07008258 auto* interface_method = iftable->GetInterface(i)->GetVirtualMethod(j, image_pointer_size_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07008259 MethodNameAndSignatureComparator interface_name_comparator(
Mathieu Chartiere401d142015-04-22 13:56:20 -07008260 interface_method->GetInterfaceMethodIfProxy(image_pointer_size_));
David Srbeckye36e7f22018-11-14 14:21:23 +00008261 uint32_t imt_index = interface_method->GetImtIndex();
Alex Lighteb7c1442015-08-31 13:17:42 -07008262 ArtMethod** imt_ptr = &out_imt[imt_index];
Ian Rogers9bc81912012-10-11 21:43:36 -07008263 // For each method listed in the interface's method list, find the
8264 // matching method in our class's method list. We want to favor the
8265 // subclass over the superclass, which just requires walking
8266 // back from the end of the vtable. (This only matters if the
8267 // superclass defines a private method and this class redefines
8268 // it -- otherwise it would use the same vtable slot. In .dex files
8269 // those don't end up in the virtual method table, so it shouldn't
8270 // matter which direction we go. We walk it backward anyway.)
Alex Lighteb7c1442015-08-31 13:17:42 -07008271 //
8272 // To find defaults we need to do the same but also go over interfaces.
8273 bool found_impl = false;
Alex Light9139e002015-10-09 15:59:48 -07008274 ArtMethod* vtable_impl = nullptr;
Alex Lighteb7c1442015-08-31 13:17:42 -07008275 for (int32_t k = input_array_length - 1; k >= 0; --k) {
Alex Lighte64300b2015-12-15 15:02:47 -08008276 ArtMethod* vtable_method = using_virtuals ?
8277 &input_virtual_methods[k] :
Mathieu Chartiere401d142015-04-22 13:56:20 -07008278 input_vtable_array->GetElementPtrSize<ArtMethod*>(k, image_pointer_size_);
8279 ArtMethod* vtable_method_for_name_comparison =
8280 vtable_method->GetInterfaceMethodIfProxy(image_pointer_size_);
Alex Lighta467a6e2020-03-23 16:07:29 -07008281 DCHECK(!vtable_method->IsStatic()) << vtable_method->PrettyMethod();
Ian Rogers03b6eaf2014-10-28 09:34:57 -07008282 if (interface_name_comparator.HasSameNameAndSignature(
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07008283 vtable_method_for_name_comparison)) {
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07008284 if (!vtable_method->IsAbstract() && !vtable_method->IsPublic()) {
Mathieu Chartier4d122c12015-06-17 14:14:36 -07008285 // Must do EndAssertNoThreadSuspension before throw since the throw can cause
8286 // allocations.
8287 self->EndAssertNoThreadSuspension(old_cause);
Mathieu Chartiere401d142015-04-22 13:56:20 -07008288 ThrowIllegalAccessError(klass.Get(),
Brian Carlstromf3632832014-05-20 15:36:53 -07008289 "Method '%s' implementing interface method '%s' is not public",
David Sehr709b0702016-10-13 09:12:37 -07008290 vtable_method->PrettyMethod().c_str(),
8291 interface_method->PrettyMethod().c_str());
Ian Rogers9bc81912012-10-11 21:43:36 -07008292 return false;
Alex Light9139e002015-10-09 15:59:48 -07008293 } else if (UNLIKELY(vtable_method->IsOverridableByDefaultMethod())) {
Alex Lighteb7c1442015-08-31 13:17:42 -07008294 // We might have a newer, better, default method for this, so we just skip it. If we
8295 // are still using this we will select it again when scanning for default methods. To
8296 // obviate the need to copy the method again we will make a note that we already found
8297 // a default here.
8298 // TODO This should be much cleaner.
Alex Light9139e002015-10-09 15:59:48 -07008299 vtable_impl = vtable_method;
Alex Lighteb7c1442015-08-31 13:17:42 -07008300 break;
8301 } else {
8302 found_impl = true;
Alex Light705ad492015-09-21 11:36:30 -07008303 if (LIKELY(fill_tables)) {
8304 method_array->SetElementPtrSize(j, vtable_method, image_pointer_size_);
8305 // Place method in imt if entry is empty, place conflict otherwise.
8306 SetIMTRef(unimplemented_method,
8307 imt_conflict_method,
Alex Light705ad492015-09-21 11:36:30 -07008308 vtable_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00008309 /*out*/out_new_conflict,
Alex Light705ad492015-09-21 11:36:30 -07008310 /*out*/imt_ptr);
8311 }
Ian Rogers9bc81912012-10-11 21:43:36 -07008312 break;
8313 }
8314 }
Alex Light9139e002015-10-09 15:59:48 -07008315 }
8316 // Continue on to the next method if we are done.
8317 if (LIKELY(found_impl)) {
8318 continue;
8319 } else if (LIKELY(super_interface)) {
8320 // Don't look for a default implementation when the super-method is implemented directly
8321 // by the class.
8322 //
8323 // See if we can use the superclass's method and skip searching everything else.
8324 // Note: !found_impl && super_interface
8325 CHECK(extend_super_iftable);
8326 // If this is a super_interface method it is possible we shouldn't override it because a
8327 // superclass could have implemented it directly. We get the method the superclass used
8328 // to implement this to know if we can override it with a default method. Doing this is
8329 // safe since we know that the super_iftable is filled in so we can simply pull it from
8330 // there. We don't bother if this is not a superclass's interface since in that case we
8331 // have scanned the entire vtable anyway and would have found it.
8332 // TODO This is rather dirty but it is faster than searching through the entire vtable
8333 // every time.
8334 ArtMethod* supers_method =
8335 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
8336 DCHECK(supers_method != nullptr);
8337 DCHECK(interface_name_comparator.HasSameNameAndSignature(supers_method));
Alex Light705ad492015-09-21 11:36:30 -07008338 if (LIKELY(!supers_method->IsOverridableByDefaultMethod())) {
Alex Light9139e002015-10-09 15:59:48 -07008339 // The method is not overridable by a default method (i.e. it is directly implemented
8340 // in some class). Therefore move onto the next interface method.
8341 continue;
Alex Lightd6c2bfa2016-05-02 18:51:34 -07008342 } else {
8343 // If the superclass's method is overridable by a default method we need to keep
8344 // track of it since, though it is overridable, it is not guaranteed to be 'overridden'.
8345 // If it turns out not to be overridden and we did not keep track of it we might add it
Alex Light66630be2016-05-04 09:23:09 -07008346 // to the vtable twice, causing corruption (vtable entries having inconsistent and
8347 // illegal states, incorrect vtable size, and incorrect or inconsistent iftable entries)
8348 // in this class and any subclasses.
Alex Lightd6c2bfa2016-05-02 18:51:34 -07008349 DCHECK(vtable_impl == nullptr || vtable_impl == supers_method)
David Sehr709b0702016-10-13 09:12:37 -07008350 << "vtable_impl was " << ArtMethod::PrettyMethod(vtable_impl)
8351 << " and not 'nullptr' or "
8352 << supers_method->PrettyMethod()
8353 << " as expected. IFTable appears to be corrupt!";
Alex Lightd6c2bfa2016-05-02 18:51:34 -07008354 vtable_impl = supers_method;
Alex Light9139e002015-10-09 15:59:48 -07008355 }
8356 }
8357 // If we haven't found it yet we should search through the interfaces for default methods.
Vladimir Marko921094a2017-01-12 18:37:06 +00008358 ArtMethod* current_method = helper.FindMethod(interface_method,
8359 interface_name_comparator,
8360 vtable_impl);
Alex Light705ad492015-09-21 11:36:30 -07008361 if (LIKELY(fill_tables)) {
Alex Light12771082016-01-26 16:07:41 -08008362 if (current_method == nullptr && !super_interface) {
Alex Light705ad492015-09-21 11:36:30 -07008363 // We could not find an implementation for this method and since it is a brand new
8364 // interface we searched the entire vtable (and all default methods) for an
8365 // implementation but couldn't find one. We therefore need to make a miranda method.
Vladimir Marko921094a2017-01-12 18:37:06 +00008366 current_method = helper.GetOrCreateMirandaMethod(interface_method,
8367 interface_name_comparator);
Alex Light12771082016-01-26 16:07:41 -08008368 }
8369
8370 if (current_method != nullptr) {
8371 // We found a default method implementation. Record it in the iftable and IMT.
8372 method_array->SetElementPtrSize(j, current_method, image_pointer_size_);
8373 SetIMTRef(unimplemented_method,
8374 imt_conflict_method,
Alex Light12771082016-01-26 16:07:41 -08008375 current_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00008376 /*out*/out_new_conflict,
Alex Light12771082016-01-26 16:07:41 -08008377 /*out*/imt_ptr);
Alex Light9139e002015-10-09 15:59:48 -07008378 }
8379 }
Alex Light705ad492015-09-21 11:36:30 -07008380 } // For each method in interface end.
8381 } // if (num_methods > 0)
8382 } // For each interface.
Alex Light705ad492015-09-21 11:36:30 -07008383 // TODO don't extend virtuals of interface unless necessary (when is it?).
Vladimir Marko921094a2017-01-12 18:37:06 +00008384 if (helper.HasNewVirtuals()) {
8385 LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
8386 helper.ReallocMethods(); // No return value to check. Native allocation failure aborts.
8387 LengthPrefixedArray<ArtMethod>* methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
8388
Mathieu Chartierd4d83b82015-06-19 20:24:45 -07008389 // Done copying methods, they are all roots in the class now, so we can end the no thread
Mathieu Chartiere401d142015-04-22 13:56:20 -07008390 // suspension assert.
8391 self->EndAssertNoThreadSuspension(old_cause);
Mathieu Chartierd4d83b82015-06-19 20:24:45 -07008392
Alex Light705ad492015-09-21 11:36:30 -07008393 if (fill_tables) {
Vladimir Marko3068d582019-05-28 16:39:29 +01008394 vtable.Assign(helper.UpdateVtable(default_translations, vtable));
Andreas Gampefa4333d2017-02-14 11:10:34 -08008395 if (UNLIKELY(vtable == nullptr)) {
Vladimir Marko921094a2017-01-12 18:37:06 +00008396 // The helper has already called self->AssertPendingOOMException();
Alex Light705ad492015-09-21 11:36:30 -07008397 return false;
8398 }
Vladimir Marko921094a2017-01-12 18:37:06 +00008399 helper.UpdateIfTable(iftable);
8400 helper.UpdateIMT(out_imt);
Mathieu Chartiere401d142015-04-22 13:56:20 -07008401 }
Alex Light705ad492015-09-21 11:36:30 -07008402
Vladimir Marko921094a2017-01-12 18:37:06 +00008403 helper.CheckNoStaleMethodsInDexCache();
8404 helper.ClobberOldMethods(old_methods, methods);
Mathieu Chartiere401d142015-04-22 13:56:20 -07008405 } else {
8406 self->EndAssertNoThreadSuspension(old_cause);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008407 }
Alex Light705ad492015-09-21 11:36:30 -07008408 if (kIsDebugBuild && !is_interface) {
Alex Light1f3925d2016-09-07 12:04:20 -07008409 SanityCheckVTable(self, klass, image_pointer_size_);
Elliott Hughes4681c802011-09-25 18:04:37 -07008410 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008411 return true;
8412}
8413
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07008414bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08008415 CHECK(klass != nullptr);
Igor Murashkinb1d8c312015-08-04 11:18:43 -07008416 return LinkFields(self, klass, false, nullptr);
Brian Carlstrom4873d462011-08-21 15:23:39 -07008417}
8418
Igor Murashkinb1d8c312015-08-04 11:18:43 -07008419bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08008420 CHECK(klass != nullptr);
Igor Murashkinb1d8c312015-08-04 11:18:43 -07008421 return LinkFields(self, klass, true, class_size);
Brian Carlstrom4873d462011-08-21 15:23:39 -07008422}
8423
Brian Carlstromdbc05252011-09-09 01:59:59 -07008424struct LinkFieldsComparator {
Igor Murashkin2ffb7032017-11-08 13:35:21 -08008425 LinkFieldsComparator() REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07008426 }
Ian Rogers00f7d0e2012-07-19 15:28:27 -07008427 // No thread safety analysis as will be called from STL. Checked lock held in constructor.
Mathieu Chartierc7853442015-03-27 14:35:38 -07008428 bool operator()(ArtField* field1, ArtField* field2)
Ian Rogers2dd0e2c2013-01-24 12:42:14 -08008429 NO_THREAD_SAFETY_ANALYSIS {
Fred Shih37f05ef2014-07-16 18:38:08 -07008430 // First come reference fields, then 64-bit, then 32-bit, and then 16-bit, then finally 8-bit.
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07008431 Primitive::Type type1 = field1->GetTypeAsPrimitiveType();
8432 Primitive::Type type2 = field2->GetTypeAsPrimitiveType();
Ian Rogersef7d42f2014-01-06 12:55:46 -08008433 if (type1 != type2) {
Vladimir Markod5777482014-11-12 17:02:02 +00008434 if (type1 == Primitive::kPrimNot) {
8435 // Reference always goes first.
8436 return true;
Ian Rogersef7d42f2014-01-06 12:55:46 -08008437 }
Vladimir Markod5777482014-11-12 17:02:02 +00008438 if (type2 == Primitive::kPrimNot) {
8439 // Reference always goes first.
8440 return false;
8441 }
8442 size_t size1 = Primitive::ComponentSize(type1);
8443 size_t size2 = Primitive::ComponentSize(type2);
8444 if (size1 != size2) {
8445 // Larger primitive types go first.
8446 return size1 > size2;
8447 }
8448 // Primitive types differ but sizes match. Arbitrarily order by primitive type.
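    // (Illustrative: int and float both have a 4-byte component size, so the Primitive::Type
    // enum value decides which of those two groups comes first.)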
8449 return type1 < type2;
Brian Carlstromdbc05252011-09-09 01:59:59 -07008450 }
Vladimir Marko7a7c1db2014-11-17 15:13:34 +00008451 // Same basic group? Then sort by dex field index. This is guaranteed to be sorted
8452 // by name and for equal names by type id index.
8453 // NOTE: This works also for proxies. Their static fields are assigned appropriate indexes.
8454 return field1->GetDexFieldIndex() < field2->GetDexFieldIndex();
Brian Carlstromdbc05252011-09-09 01:59:59 -07008455 }
8456};
8457
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07008458bool ClassLinker::LinkFields(Thread* self,
8459 Handle<mirror::Class> klass,
8460 bool is_static,
Igor Murashkinb1d8c312015-08-04 11:18:43 -07008461 size_t* class_size) {
Ian Rogers7b078e82014-09-10 14:44:24 -07008462 self->AllowThreadSuspension();
Mathieu Chartierc7853442015-03-27 14:35:38 -07008463 const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07008464 LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
8465 klass->GetIFieldsPtr();
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008466
Mingyao Yang98d1cc82014-05-15 17:02:16 -07008467 // Initialize field_offset
Brian Carlstrom693267a2011-09-06 09:25:34 -07008468 MemberOffset field_offset(0);
Brian Carlstrom3320cf42011-10-04 14:58:28 -07008469 if (is_static) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07008470 field_offset = klass->GetFirstReferenceStaticFieldOffsetDuringLinking(image_pointer_size_);
Brian Carlstrom3320cf42011-10-04 14:58:28 -07008471 } else {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07008472 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07008473 if (super_class != nullptr) {
Brian Carlstromf3632832014-05-20 15:36:53 -07008474 CHECK(super_class->IsResolved())
David Sehr709b0702016-10-13 09:12:37 -07008475 << klass->PrettyClass() << " " << super_class->PrettyClass();
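      // Instance fields of this class start where the superclass's instance data ends; any
      // misalignment at that boundary is recorded as a gap and back-filled further below.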
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008476 field_offset = MemberOffset(super_class->GetObjectSize());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008477 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008478 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008479
David Sehr709b0702016-10-13 09:12:37 -07008480 CHECK_EQ(num_fields == 0, fields == nullptr) << klass->PrettyClass();
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008481
Brian Carlstromdbc05252011-09-09 01:59:59 -07008482 // We want a relatively stable order so that adding new fields
Elliott Hughesadb460d2011-10-05 17:02:34 -07008483 // minimizes disruption of the C++ versions of classes such as Class and Method.
Alex Lighte64300b2015-12-15 15:02:47 -08008484 //
8485 // The overall sort order is:
8486 // 1) All object reference fields, sorted alphabetically.
8487 // 2) All java long (64-bit) integer fields, sorted alphabetically.
8488 // 3) All java double (64-bit) floating point fields, sorted alphabetically.
8489 // 4) All java int (32-bit) integer fields, sorted alphabetically.
8490 // 5) All java float (32-bit) floating point fields, sorted alphabetically.
8491 // 6) All java char (16-bit) integer fields, sorted alphabetically.
8492 // 7) All java short (16-bit) integer fields, sorted alphabetically.
8493 // 8) All java boolean (8-bit) integer fields, sorted alphabetically.
8494 // 9) All java byte (8-bit) integer fields, sorted alphabetically.
8495 //
8496 // Once the fields are sorted in this order we will attempt to fill any gaps that might be present
8497 // in the memory layout of the structure. See ShuffleForward for how this is done.
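  // Illustrative sketch (hypothetical field set, not from this file): for instance fields
  //   Object a; long b; int c; byte d;
  // the grouped order is a (reference), b (64-bit), c (32-bit), d (8-bit). If aligning b to
  // 8 bytes would leave a 4-byte hole after the reference block, the ShuffleForward passes
  // below can move a smaller field such as c into that hole instead of wasting it.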
Mathieu Chartierc7853442015-03-27 14:35:38 -07008498 std::deque<ArtField*> grouped_and_sorted_fields;
Mathieu Chartier2d5f39e2014-09-19 17:52:37 -07008499 const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
Fred Shih37f05ef2014-07-16 18:38:08 -07008500 "Naked ArtField references in deque");
Brian Carlstromdbc05252011-09-09 01:59:59 -07008501 for (size_t i = 0; i < num_fields; i++) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07008502 grouped_and_sorted_fields.push_back(&fields->At(i));
Brian Carlstromdbc05252011-09-09 01:59:59 -07008503 }
Mathieu Chartier590fee92013-09-13 13:46:47 -07008504 std::sort(grouped_and_sorted_fields.begin(), grouped_and_sorted_fields.end(),
8505 LinkFieldsComparator());
Brian Carlstromdbc05252011-09-09 01:59:59 -07008506
Fred Shih381e4ca2014-08-25 17:24:27 -07008507 // References should be at the front.
Brian Carlstromdbc05252011-09-09 01:59:59 -07008508 size_t current_field = 0;
8509 size_t num_reference_fields = 0;
Fred Shih381e4ca2014-08-25 17:24:27 -07008510 FieldGaps gaps;
8511
Brian Carlstromdbc05252011-09-09 01:59:59 -07008512 for (; current_field < num_fields; current_field++) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07008513 ArtField* field = grouped_and_sorted_fields.front();
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07008514 Primitive::Type type = field->GetTypeAsPrimitiveType();
Brian Carlstrom6b4ef022011-10-23 14:59:04 -07008515 bool isPrimitive = type != Primitive::kPrimNot;
Brian Carlstromdbc05252011-09-09 01:59:59 -07008516 if (isPrimitive) {
Brian Carlstrom7934ac22013-07-26 10:54:15 -07008517 break; // past last reference, move on to the next phase
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008518 }
Vladimir Marko76649e82014-11-10 18:32:59 +00008519 if (UNLIKELY(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(
8520 field_offset.Uint32Value()))) {
Fred Shih381e4ca2014-08-25 17:24:27 -07008521 MemberOffset old_offset = field_offset;
8522 field_offset = MemberOffset(RoundUp(field_offset.Uint32Value(), 4));
8523 AddFieldGap(old_offset.Uint32Value(), field_offset.Uint32Value(), &gaps);
8524 }
Roland Levillain14d90572015-07-16 10:52:26 +01008525 DCHECK_ALIGNED(field_offset.Uint32Value(), sizeof(mirror::HeapReference<mirror::Object>));
Brian Carlstromdbc05252011-09-09 01:59:59 -07008526 grouped_and_sorted_fields.pop_front();
8527 num_reference_fields++;
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008528 field->SetOffset(field_offset);
Vladimir Marko76649e82014-11-10 18:32:59 +00008529 field_offset = MemberOffset(field_offset.Uint32Value() +
8530 sizeof(mirror::HeapReference<mirror::Object>));
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008531 }
Fred Shih381e4ca2014-08-25 17:24:27 -07008532 // Gaps are stored as a max heap which means that we must shuffle from largest to smallest
8533 // otherwise we could end up with suboptimal gap fills.
Vladimir Marko76649e82014-11-10 18:32:59 +00008534 ShuffleForward<8>(&current_field, &field_offset, &grouped_and_sorted_fields, &gaps);
8535 ShuffleForward<4>(&current_field, &field_offset, &grouped_and_sorted_fields, &gaps);
8536 ShuffleForward<2>(&current_field, &field_offset, &grouped_and_sorted_fields, &gaps);
8537 ShuffleForward<1>(&current_field, &field_offset, &grouped_and_sorted_fields, &gaps);
Fred Shih37f05ef2014-07-16 18:38:08 -07008538 CHECK(grouped_and_sorted_fields.empty()) << "Missed " << grouped_and_sorted_fields.size() <<
8539 " fields.";
Ian Rogers7b078e82014-09-10 14:44:24 -07008540 self->EndAssertNoThreadSuspension(old_no_suspend_cause);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008541
Elliott Hughesadb460d2011-10-05 17:02:34 -07008542 // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
Mathieu Chartier0cd81352014-05-22 16:48:55 -07008543 if (!is_static && klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
Elliott Hughesadb460d2011-10-05 17:02:34 -07008544 // We know there are no non-reference fields in the Reference classes, and we know
8545 // that 'referent' is alphabetically last, so this is easy...
David Sehr709b0702016-10-13 09:12:37 -07008546 CHECK_EQ(num_reference_fields, num_fields) << klass->PrettyClass();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07008547 CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
David Sehr709b0702016-10-13 09:12:37 -07008548 << klass->PrettyClass();
Elliott Hughesadb460d2011-10-05 17:02:34 -07008549 --num_reference_fields;
8550 }
8551
Mingyao Yang98d1cc82014-05-15 17:02:16 -07008552 size_t size = field_offset.Uint32Value();
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008553 // Update klass
Brian Carlstrom3320cf42011-10-04 14:58:28 -07008554 if (is_static) {
8555 klass->SetNumReferenceStaticFields(num_reference_fields);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07008556 *class_size = size;
Brian Carlstrom3320cf42011-10-04 14:58:28 -07008557 } else {
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008558 klass->SetNumReferenceInstanceFields(num_reference_fields);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07008559 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07008560 if (num_reference_fields == 0 || super_class == nullptr) {
8561 // object has one reference field, klass, but we ignore it since we always visit the class.
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07008562 // super_class is null iff the class is java.lang.Object.
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07008563 if (super_class == nullptr ||
8564 (super_class->GetClassFlags() & mirror::kClassFlagNoReferenceFields) != 0) {
8565 klass->SetClassFlags(klass->GetClassFlags() | mirror::kClassFlagNoReferenceFields);
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07008566 }
8567 }
8568 if (kIsDebugBuild) {
8569 DCHECK_EQ(super_class == nullptr, klass->DescriptorEquals("Ljava/lang/Object;"));
8570 size_t total_reference_instance_fields = 0;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07008571 ObjPtr<mirror::Class> cur_super = klass.Get();
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07008572 while (cur_super != nullptr) {
8573 total_reference_instance_fields += cur_super->NumReferenceInstanceFieldsDuringLinking();
8574 cur_super = cur_super->GetSuperClass();
8575 }
8576 if (super_class == nullptr) {
David Sehr709b0702016-10-13 09:12:37 -07008577 CHECK_EQ(total_reference_instance_fields, 1u) << klass->PrettyDescriptor();
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07008578 } else {
8579 // Check that there are at least num_reference_fields reference fields in addition to Object's class field.
8580 CHECK_GE(total_reference_instance_fields, 1u + num_reference_fields)
David Sehr709b0702016-10-13 09:12:37 -07008581 << klass->PrettyClass();
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07008582 }
8583 }
Brian Carlstromdbc05252011-09-09 01:59:59 -07008584 if (!klass->IsVariableSize()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07008585 std::string temp;
8586 DCHECK_GE(size, sizeof(mirror::Object)) << klass->GetDescriptor(&temp);
8587 size_t previous_size = klass->GetObjectSize();
8588 if (previous_size != 0) {
8589 // Make sure that we didn't originally have an incorrect size.
8590 CHECK_EQ(previous_size, size) << klass->GetDescriptor(&temp);
Mathieu Chartier79b4f382013-10-23 15:21:37 -07008591 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07008592 klass->SetObjectSize(size);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008593 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008594 }
Vladimir Marko76649e82014-11-10 18:32:59 +00008595
8596 if (kIsDebugBuild) {
8597 // Make sure that the fields array is ordered by name but all reference
8598 // offsets are at the beginning as far as alignment allows.
8599 MemberOffset start_ref_offset = is_static
Mathieu Chartiere401d142015-04-22 13:56:20 -07008600 ? klass->GetFirstReferenceStaticFieldOffsetDuringLinking(image_pointer_size_)
Vladimir Marko76649e82014-11-10 18:32:59 +00008601 : klass->GetFirstReferenceInstanceFieldOffset();
8602 MemberOffset end_ref_offset(start_ref_offset.Uint32Value() +
8603 num_reference_fields *
8604 sizeof(mirror::HeapReference<mirror::Object>));
8605 MemberOffset current_ref_offset = start_ref_offset;
8606 for (size_t i = 0; i < num_fields; i++) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07008607 ArtField* field = &fields->At(i);
Mathieu Chartierc7853442015-03-27 14:35:38 -07008608 VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
David Sehr709b0702016-10-13 09:12:37 -07008609 << " class=" << klass->PrettyClass() << " field=" << field->PrettyField()
8610 << " offset=" << field->GetOffsetDuringLinking();
Vladimir Marko76649e82014-11-10 18:32:59 +00008611 if (i != 0) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07008612 ArtField* const prev_field = &fields->At(i - 1);
Vladimir Marko7a7c1db2014-11-17 15:13:34 +00008613 // NOTE: The field names can be the same. This is not possible in the Java language
8614 // but it's valid Java/dex bytecode and for example proguard can generate such bytecode.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07008615 DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
Vladimir Marko76649e82014-11-10 18:32:59 +00008616 }
8617 Primitive::Type type = field->GetTypeAsPrimitiveType();
8618 bool is_primitive = type != Primitive::kPrimNot;
8619 if (klass->DescriptorEquals("Ljava/lang/ref/Reference;") &&
8620 strcmp("referent", field->GetName()) == 0) {
8621 is_primitive = true; // We lied above, so we have to expect a lie here.
8622 }
8623 MemberOffset offset = field->GetOffsetDuringLinking();
8624 if (is_primitive) {
8625 if (offset.Uint32Value() < end_ref_offset.Uint32Value()) {
8626 // Shuffled before references.
8627 size_t type_size = Primitive::ComponentSize(type);
8628 CHECK_LT(type_size, sizeof(mirror::HeapReference<mirror::Object>));
8629 CHECK_LT(offset.Uint32Value(), start_ref_offset.Uint32Value());
8630 CHECK_LE(offset.Uint32Value() + type_size, start_ref_offset.Uint32Value());
8631 CHECK(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(offset.Uint32Value()));
8632 }
8633 } else {
8634 CHECK_EQ(current_ref_offset.Uint32Value(), offset.Uint32Value());
8635 current_ref_offset = MemberOffset(current_ref_offset.Uint32Value() +
8636 sizeof(mirror::HeapReference<mirror::Object>));
8637 }
8638 }
8639 CHECK_EQ(current_ref_offset.Uint32Value(), end_ref_offset.Uint32Value());
8640 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008641 return true;
8642}
8643
Vladimir Marko76649e82014-11-10 18:32:59 +00008644// Set the bitmap of reference instance field offsets.
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07008645void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008646 uint32_t reference_offsets = 0;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07008647 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07008648 // Leave the reference offsets as 0 for mirror::Object (the class field is handled specially).
Andreas Gampe2ed8def2014-08-28 14:41:02 -07008649 if (super_class != nullptr) {
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008650 reference_offsets = super_class->GetReferenceInstanceOffsets();
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07008651 // Compute reference offsets unless our superclass overflowed.
8652 if (reference_offsets != mirror::Class::kClassWalkSuper) {
8653 size_t num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
Vladimir Marko76649e82014-11-10 18:32:59 +00008654 if (num_reference_fields != 0u) {
8655 // All of the fields that contain object references are guaranteed to be grouped in memory
8656 // starting at an appropriately aligned address after super class object data.
8657 uint32_t start_offset = RoundUp(super_class->GetObjectSize(),
8658 sizeof(mirror::HeapReference<mirror::Object>));
8659 uint32_t start_bit = (start_offset - mirror::kObjectHeaderSize) /
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07008660 sizeof(mirror::HeapReference<mirror::Object>);
Vladimir Marko76649e82014-11-10 18:32:59 +00008661 if (start_bit + num_reference_fields > 32) {
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07008662 reference_offsets = mirror::Class::kClassWalkSuper;
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07008663 } else {
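            // For example (illustrative values): with start_bit == 3 and num_reference_fields == 2,
            // (0xffffffffu << 3) & (0xffffffffu >> 27) == 0x18, i.e. bits 3 and 4 are set.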
Vladimir Marko76649e82014-11-10 18:32:59 +00008664 reference_offsets |= (0xffffffffu << start_bit) &
8665 (0xffffffffu >> (32 - (start_bit + num_reference_fields)));
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07008666 }
8667 }
Brian Carlstrom4873d462011-08-21 15:23:39 -07008668 }
8669 }
Mingyao Yangfaff0f02014-09-10 12:03:22 -07008670 klass->SetReferenceInstanceOffsets(reference_offsets);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008671}
8672
Vladimir Marko18090d12018-06-01 16:53:12 +01008673ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
8674 ObjPtr<mirror::DexCache> dex_cache) {
8675 StackHandleScope<1> hs(Thread::Current());
8676 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
8677 return DoResolveString(string_idx, h_dex_cache);
8678}
8679
8680ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
8681 Handle<mirror::DexCache> dex_cache) {
Vladimir Markoa64b52d2017-12-08 16:27:49 +00008682 const DexFile& dex_file = *dex_cache->GetDexFile();
Ian Rogersdfb325e2013-10-30 01:00:44 -07008683 uint32_t utf16_length;
8684 const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07008685 ObjPtr<mirror::String> string = intern_table_->InternStrong(utf16_length, utf8_data);
Vladimir Marko8d6768d2017-03-14 10:13:21 +00008686 if (string != nullptr) {
8687 dex_cache->SetResolvedString(string_idx, string);
8688 }
Vladimir Marko28e012a2017-12-07 11:22:59 +00008689 return string;
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07008690}
8691
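// Unlike DoResolveString above, this lookup-only variant just consults the intern table
// (LookupStrong) without creating or interning a new string; on a hit it still caches the
// result in the dex cache so later resolutions are fast.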
Vladimir Marko18090d12018-06-01 16:53:12 +01008692ObjPtr<mirror::String> ClassLinker::DoLookupString(dex::StringIndex string_idx,
8693 ObjPtr<mirror::DexCache> dex_cache) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08008694 DCHECK(dex_cache != nullptr);
Vladimir Markoa64b52d2017-12-08 16:27:49 +00008695 const DexFile& dex_file = *dex_cache->GetDexFile();
Vladimir Markocac5a7e2016-02-22 10:39:50 +00008696 uint32_t utf16_length;
8697 const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08008698 ObjPtr<mirror::String> string =
8699 intern_table_->LookupStrong(Thread::Current(), utf16_length, utf8_data);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00008700 if (string != nullptr) {
8701 dex_cache->SetResolvedString(string_idx, string);
8702 }
Vladimir Marko28e012a2017-12-07 11:22:59 +00008703 return string;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00008704}
8705
Vladimir Marko666ee3d2017-12-11 18:37:36 +00008706ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
Vladimir Marko09c5ca42018-05-31 15:15:31 +01008707 ObjPtr<mirror::Class> referrer) {
8708 return DoLookupResolvedType(type_idx, referrer->GetDexCache(), referrer->GetClassLoader());
8709}
8710
8711ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
Vladimir Marko666ee3d2017-12-11 18:37:36 +00008712 ObjPtr<mirror::DexCache> dex_cache,
8713 ObjPtr<mirror::ClassLoader> class_loader) {
8714 const DexFile& dex_file = *dex_cache->GetDexFile();
8715 const char* descriptor = dex_file.StringByTypeIdx(type_idx);
8716 DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
8717 ObjPtr<mirror::Class> type = nullptr;
8718 if (descriptor[1] == '\0') {
8719 // Only the descriptors of primitive types should be 1 character long; also avoid class lookup
8720 // for primitive classes that aren't backed by dex files.
Vladimir Marko9186b182018-11-06 14:55:54 +00008721 type = LookupPrimitiveClass(descriptor[0]);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00008722 } else {
8723 Thread* const self = Thread::Current();
8724 DCHECK(self != nullptr);
8725 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
8726 // Find the class in the loaded classes table.
Vladimir Markobcf17522018-06-01 13:14:32 +01008727 type = LookupClass(self, descriptor, hash, class_loader);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00008728 }
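  // Only a fully resolved class is published to the dex cache and returned here; a class that
  // was found but is not yet resolved is treated as not found, leaving full resolution to
  // DoResolveType.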
8729 if (type != nullptr) {
8730 if (type->IsResolved()) {
8731 dex_cache->SetResolvedType(type_idx, type);
Mathieu Chartierb8901302016-09-30 10:27:43 -07008732 } else {
Vladimir Marko666ee3d2017-12-11 18:37:36 +00008733 type = nullptr;
Vladimir Marko8d6768d2017-03-14 10:13:21 +00008734 }
Mathieu Chartierb8901302016-09-30 10:27:43 -07008735 }
Vladimir Marko8d6768d2017-03-14 10:13:21 +00008736 return type;
Mathieu Chartierb8901302016-09-30 10:27:43 -07008737}
8738
Andreas Gampeb0625e02019-05-01 12:43:31 -07008739template <typename RefType>
8740ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx, RefType referrer) {
Vladimir Marko09c5ca42018-05-31 15:15:31 +01008741 StackHandleScope<2> hs(Thread::Current());
8742 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
8743 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
8744 return DoResolveType(type_idx, dex_cache, class_loader);
8745}
8746
Andreas Gampe4835d212018-11-21 14:55:10 -08008747// Instantiate the above.
8748template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
8749 ArtField* referrer);
8750template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
8751 ArtMethod* referrer);
8752template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
8753 ObjPtr<mirror::Class> referrer);
8754
Vladimir Marko09c5ca42018-05-31 15:15:31 +01008755ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
Vladimir Marko666ee3d2017-12-11 18:37:36 +00008756 Handle<mirror::DexCache> dex_cache,
8757 Handle<mirror::ClassLoader> class_loader) {
8758 Thread* self = Thread::Current();
8759 const char* descriptor = dex_cache->GetDexFile()->StringByTypeIdx(type_idx);
8760 ObjPtr<mirror::Class> resolved = FindClass(self, descriptor, class_loader);
8761 if (resolved != nullptr) {
8762 // TODO: we used to throw here if resolved's class loader was not the
8763 // boot class loader. This was to permit different classes with the
8764 // same name to be loaded simultaneously by different loaders
8765 dex_cache->SetResolvedType(type_idx, resolved);
8766 } else {
8767 CHECK(self->IsExceptionPending())
8768 << "Expected pending exception for failed resolution of: " << descriptor;
8769 // Convert a ClassNotFoundException to a NoClassDefFoundError.
8770 StackHandleScope<1> hs(self);
8771 Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
Vladimir Markob4eb1b12018-05-24 11:09:38 +01008772 if (cause->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException, this))) {
Vladimir Marko666ee3d2017-12-11 18:37:36 +00008773 DCHECK(resolved == nullptr); // No Handle needed to preserve resolved.
8774 self->ClearException();
8775 ThrowNoClassDefFoundError("Failed resolution of: %s", descriptor);
8776 self->GetException()->SetCause(cause.Get());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008777 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008778 }
Vladimir Marko72ab6842017-01-20 19:32:50 +00008779 DCHECK((resolved == nullptr) || resolved->IsResolved())
David Sehr709b0702016-10-13 09:12:37 -07008780 << resolved->PrettyDescriptor() << " " << resolved->GetStatus();
Vladimir Marko28e012a2017-12-07 11:22:59 +00008781 return resolved;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008782}
8783
Nicolas Geoffrayea179f42018-02-08 22:30:18 +00008784ArtMethod* ClassLinker::FindResolvedMethod(ObjPtr<mirror::Class> klass,
8785 ObjPtr<mirror::DexCache> dex_cache,
8786 ObjPtr<mirror::ClassLoader> class_loader,
8787 uint32_t method_idx) {
8788 // Search for the method using dex_cache and method_idx. The Class::Find*Method()
8789 // functions can optimize the search if the dex_cache is the same as the DexCache
8790 // of the class, with fall-back to name and signature search otherwise.
8791 ArtMethod* resolved = nullptr;
8792 if (klass->IsInterface()) {
8793 resolved = klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
8794 } else {
8795 resolved = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
8796 }
8797 DCHECK(resolved == nullptr || resolved->GetDeclaringClassUnchecked() != nullptr);
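  // If the hidden API check denies this (class_loader, dex_cache) context access to the
  // resolved method, behave as if the method had not been found at all.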
David Brazdil8ce3bfa2018-03-12 18:01:18 +00008798 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01008799 hiddenapi::ShouldDenyAccessToMember(resolved,
8800 hiddenapi::AccessContext(class_loader, dex_cache),
8801 hiddenapi::AccessMethod::kLinking)) {
David Brazdil8ce3bfa2018-03-12 18:01:18 +00008802 resolved = nullptr;
8803 }
Nicolas Geoffrayea179f42018-02-08 22:30:18 +00008804 if (resolved != nullptr) {
8805 // In case of jvmti, the dex file gets verified before being registered, so first
8806 // check if it's registered before checking class tables.
8807 const DexFile& dex_file = *dex_cache->GetDexFile();
Nicolas Geoffraybefa3092018-02-22 14:50:01 +00008808 DCHECK(!IsDexFileRegistered(Thread::Current(), dex_file) ||
8809 FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
Nicolas Geoffrayea179f42018-02-08 22:30:18 +00008810 << "DexFile referrer: " << dex_file.GetLocation()
8811 << " ClassLoader: " << DescribeLoaders(class_loader, "");
8812 // Be a good citizen and update the dex cache to speed subsequent calls.
8813 dex_cache->SetResolvedMethod(method_idx, resolved, image_pointer_size_);
Nicolas Geoffraybefa3092018-02-22 14:50:01 +00008814 // Disable the following invariant check as the verifier breaks it. b/73760543
8815 // const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
8816 // DCHECK(LookupResolvedType(method_id.class_idx_, dex_cache, class_loader) != nullptr)
8817 // << "Method: " << resolved->PrettyMethod() << ", "
8818 // << "Class: " << klass->PrettyClass() << " (" << klass->GetStatus() << "), "
8819 // << "DexFile referrer: " << dex_file.GetLocation();
Nicolas Geoffrayea179f42018-02-08 22:30:18 +00008820 }
8821 return resolved;
8822}
8823
David Brazdil4525e0b2018-04-05 16:57:32 +01008824// Returns true if `method` is either null or hidden.
8825// Does not print any warnings if it is hidden.
8826static bool CheckNoSuchMethod(ArtMethod* method,
8827 ObjPtr<mirror::DexCache> dex_cache,
8828 ObjPtr<mirror::ClassLoader> class_loader)
8829 REQUIRES_SHARED(Locks::mutator_lock_) {
8830 return method == nullptr ||
David Brazdilf50ac102018-10-17 18:00:06 +01008831 hiddenapi::ShouldDenyAccessToMember(method,
8832 hiddenapi::AccessContext(class_loader, dex_cache),
8833 hiddenapi::AccessMethod::kNone); // no warnings
David Brazdil4525e0b2018-04-05 16:57:32 +01008834}
8835
8836ArtMethod* ClassLinker::FindIncompatibleMethod(ObjPtr<mirror::Class> klass,
8837 ObjPtr<mirror::DexCache> dex_cache,
8838 ObjPtr<mirror::ClassLoader> class_loader,
8839 uint32_t method_idx) {
8840 if (klass->IsInterface()) {
8841 ArtMethod* method = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
8842 return CheckNoSuchMethod(method, dex_cache, class_loader) ? nullptr : method;
8843 } else {
8844 // If there was an interface method with the same signature, we would have
8845 // found it in the "copied" methods. Only DCHECK that the interface method
8846 // really does not exist.
8847 if (kIsDebugBuild) {
8848 ArtMethod* method =
8849 klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
8850 DCHECK(CheckNoSuchMethod(method, dex_cache, class_loader));
8851 }
8852 return nullptr;
8853 }
8854}
8855
Andreas Gampe42ef8ab2015-12-03 17:27:32 -08008856template <ClassLinker::ResolveMode kResolveMode>
Vladimir Marko89011192017-12-11 13:45:05 +00008857ArtMethod* ClassLinker::ResolveMethod(uint32_t method_idx,
Mathieu Chartiere401d142015-04-22 13:56:20 -07008858 Handle<mirror::DexCache> dex_cache,
8859 Handle<mirror::ClassLoader> class_loader,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07008860 ArtMethod* referrer,
8861 InvokeType type) {
Nicolas Geoffray605c5912020-04-08 15:12:39 +01008862 DCHECK(!Thread::Current()->IsExceptionPending()) << Thread::Current()->GetException()->Dump();
Andreas Gampefa4333d2017-02-14 11:10:34 -08008863 DCHECK(dex_cache != nullptr);
Vladimir Markoba118822017-06-12 15:41:56 +01008864 DCHECK(referrer == nullptr || !referrer->IsProxyMethod());
Ian Rogers08f753d2012-08-24 14:35:25 -07008865 // Check for a hit in the dex cache.
Vladimir Markoba118822017-06-12 15:41:56 +01008866 PointerSize pointer_size = image_pointer_size_;
8867 ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx, pointer_size);
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07008868 Thread::PoisonObjectPointersIfDebug();
Vladimir Marko07bfbac2017-07-06 14:55:02 +01008869 DCHECK(resolved == nullptr || !resolved->IsRuntimeMethod());
8870 bool valid_dex_cache_method = resolved != nullptr;
Vladimir Markoba118822017-06-12 15:41:56 +01008871 if (kResolveMode == ResolveMode::kNoChecks && valid_dex_cache_method) {
8872 // We have a valid method from the DexCache and no checks to perform.
Mathieu Chartiere401d142015-04-22 13:56:20 -07008873 DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07008874 return resolved;
8875 }
Vladimir Marko89011192017-12-11 13:45:05 +00008876 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08008877 const dex::MethodId& method_id = dex_file.GetMethodId(method_idx);
Vladimir Markoba118822017-06-12 15:41:56 +01008878 ObjPtr<mirror::Class> klass = nullptr;
8879 if (valid_dex_cache_method) {
8880 // We have a valid method from the DexCache but we need to perform ICCE and IAE checks.
8881 DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
Vladimir Marko666ee3d2017-12-11 18:37:36 +00008882 klass = LookupResolvedType(method_id.class_idx_, dex_cache.Get(), class_loader.Get());
Vladimir Marko6f1bd462017-12-06 17:45:03 +00008883 if (UNLIKELY(klass == nullptr)) {
Nicolas Geoffraybefa3092018-02-22 14:50:01 +00008884 // We normally should not end up here. However, the verifier currently doesn't guarantee
8885 // the invariant of having the klass in the class table. b/73760543
8886 klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
Nicolas Geoffray07614192020-04-28 17:31:31 +01008887 if (klass == nullptr) {
8888 // This can only happen if the current thread is not allowed to load
8889 // classes.
8890 DCHECK(!Thread::Current()->CanLoadClasses());
8891 DCHECK(Thread::Current()->IsExceptionPending());
8892 return nullptr;
8893 }
Vladimir Marko6f1bd462017-12-06 17:45:03 +00008894 }
Vladimir Markoba118822017-06-12 15:41:56 +01008895 } else {
8896 // The method was not in the DexCache; resolve the declaring class.
Vladimir Marko666ee3d2017-12-11 18:37:36 +00008897 klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
Vladimir Markoba118822017-06-12 15:41:56 +01008898 if (klass == nullptr) {
8899 DCHECK(Thread::Current()->IsExceptionPending());
8900 return nullptr;
8901 }
8902 }
8903
8904 // Check if the invoke type matches the class type.
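  // (For example, resolving an invoke-interface whose declaring class turns out not to be an
  // interface is an incompatible class change; with kThrow enabled the check below throws
  // IncompatibleClassChangeError.)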
8905 if (kResolveMode == ResolveMode::kCheckICCEAndIAE &&
Andreas Gampe98ea9d92018-10-19 14:06:15 -07008906 CheckInvokeClassMismatch</* kThrow= */ true>(
Vladimir Markoba118822017-06-12 15:41:56 +01008907 dex_cache.Get(), type, [klass]() { return klass; })) {
Elliott Hughescc5f9a92011-09-28 19:17:29 -07008908 DCHECK(Thread::Current()->IsExceptionPending());
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07008909 return nullptr;
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07008910 }
Vladimir Markoba118822017-06-12 15:41:56 +01008911
8912 if (!valid_dex_cache_method) {
Nicolas Geoffrayea179f42018-02-08 22:30:18 +00008913 resolved = FindResolvedMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07008914 }
Vladimir Markoba118822017-06-12 15:41:56 +01008915
8916 // Note: We can check for IllegalAccessError only if we have a referrer.
8917 if (kResolveMode == ResolveMode::kCheckICCEAndIAE && resolved != nullptr && referrer != nullptr) {
8918 ObjPtr<mirror::Class> methods_class = resolved->GetDeclaringClass();
8919 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
8920 if (!referring_class->CheckResolvedMethodAccess(methods_class,
8921 resolved,
8922 dex_cache.Get(),
8923 method_idx,
8924 type)) {
8925 DCHECK(Thread::Current()->IsExceptionPending());
8926 return nullptr;
8927 }
8928 }
8929
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07008930 // If we found a method, check for incompatible class changes.
Vladimir Markoba118822017-06-12 15:41:56 +01008931 if (LIKELY(resolved != nullptr) &&
8932 LIKELY(kResolveMode == ResolveMode::kNoChecks ||
8933 !resolved->CheckIncompatibleClassChange(type))) {
Ian Rogers08f753d2012-08-24 14:35:25 -07008934 return resolved;
8935 } else {
Vladimir Markoba118822017-06-12 15:41:56 +01008936 // If we had a method, or if we can find one with another lookup type,
8937 // it's an incompatible-class-change error.
8938 if (resolved == nullptr) {
David Brazdil4525e0b2018-04-05 16:57:32 +01008939 resolved = FindIncompatibleMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
Vladimir Markoba118822017-06-12 15:41:56 +01008940 }
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07008941 if (resolved != nullptr) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07008942 ThrowIncompatibleClassChangeError(type, resolved->GetInvokeType(), resolved, referrer);
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07008943 } else {
Vladimir Markoba118822017-06-12 15:41:56 +01008944 // We failed to find the method (using all lookup types), so throw a NoSuchMethodError.
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07008945 const char* name = dex_file.StringDataByIdx(method_id.name_idx_);
8946 const Signature signature = dex_file.GetMethodSignature(method_id);
Vladimir Markoba118822017-06-12 15:41:56 +01008947 ThrowNoSuchMethodError(type, klass, name, signature);
Ian Rogers08f753d2012-08-24 14:35:25 -07008948 }
Ian Rogerse0a02da2014-12-02 14:10:53 -08008949 Thread::Current()->AssertPendingException();
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07008950 return nullptr;
Ian Rogers08f753d2012-08-24 14:35:25 -07008951 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008952}
8953
Vladimir Marko89011192017-12-11 13:45:05 +00008954ArtMethod* ClassLinker::ResolveMethodWithoutInvokeType(uint32_t method_idx,
Jeff Hao13e748b2015-08-25 20:44:19 +00008955 Handle<mirror::DexCache> dex_cache,
8956 Handle<mirror::ClassLoader> class_loader) {
8957 ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx, image_pointer_size_);
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07008958 Thread::PoisonObjectPointersIfDebug();
Vladimir Marko07bfbac2017-07-06 14:55:02 +01008959 if (resolved != nullptr) {
8960 DCHECK(!resolved->IsRuntimeMethod());
Jeff Hao13e748b2015-08-25 20:44:19 +00008961 DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
8962 return resolved;
8963 }
8964 // Not found in the dex cache; resolve the declaring class.
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08008965 const dex::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(method_idx);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00008966 ObjPtr<mirror::Class> klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
Jeff Hao13e748b2015-08-25 20:44:19 +00008967 if (klass == nullptr) {
8968 Thread::Current()->AssertPendingException();
8969 return nullptr;
8970 }
8971 if (klass->IsInterface()) {
Vladimir Markoba118822017-06-12 15:41:56 +01008972 resolved = klass->FindInterfaceMethod(dex_cache.Get(), method_idx, image_pointer_size_);
8973 } else {
8974 resolved = klass->FindClassMethod(dex_cache.Get(), method_idx, image_pointer_size_);
Jeff Hao13e748b2015-08-25 20:44:19 +00008975 }
David Brazdil8ce3bfa2018-03-12 18:01:18 +00008976 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01008977 hiddenapi::ShouldDenyAccessToMember(
8978 resolved,
8979 hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
8980 hiddenapi::AccessMethod::kLinking)) {
David Brazdil8ce3bfa2018-03-12 18:01:18 +00008981 resolved = nullptr;
8982 }
Jeff Hao13e748b2015-08-25 20:44:19 +00008983 return resolved;
8984}
8985
Vladimir Markof44d36c2017-03-14 14:18:46 +00008986ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
8987 ObjPtr<mirror::DexCache> dex_cache,
8988 ObjPtr<mirror::ClassLoader> class_loader,
8989 bool is_static) {
8990 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08008991 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Vladimir Markof44d36c2017-03-14 14:18:46 +00008992 ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(field_id.class_idx_);
8993 if (klass == nullptr) {
Vladimir Marko666ee3d2017-12-11 18:37:36 +00008994 klass = LookupResolvedType(field_id.class_idx_, dex_cache, class_loader);
Vladimir Markof44d36c2017-03-14 14:18:46 +00008995 }
8996 if (klass == nullptr) {
8997 // The class has not been resolved yet, so the field is also unresolved.
8998 return nullptr;
8999 }
9000 DCHECK(klass->IsResolved());
Vladimir Markof44d36c2017-03-14 14:18:46 +00009001
David Brazdil1ab0fa82018-05-04 11:28:03 +01009002 return FindResolvedField(klass, dex_cache, class_loader, field_idx, is_static);
Vladimir Markof44d36c2017-03-14 14:18:46 +00009003}
9004
Vladimir Markoe11dd502017-12-08 14:09:45 +00009005ArtField* ClassLinker::ResolveField(uint32_t field_idx,
Mathieu Chartierc7853442015-03-27 14:35:38 -07009006 Handle<mirror::DexCache> dex_cache,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07009007 Handle<mirror::ClassLoader> class_loader,
9008 bool is_static) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08009009 DCHECK(dex_cache != nullptr);
Nicolas Geoffrayf3688822020-03-25 15:04:03 +00009010 DCHECK(!Thread::Current()->IsExceptionPending()) << Thread::Current()->GetException()->Dump();
Mathieu Chartierc7853442015-03-27 14:35:38 -07009011 ArtField* resolved = dex_cache->GetResolvedField(field_idx, image_pointer_size_);
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07009012 Thread::PoisonObjectPointersIfDebug();
Andreas Gampe58a5af82014-07-31 16:23:49 -07009013 if (resolved != nullptr) {
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009014 return resolved;
9015 }
Vladimir Markoe11dd502017-12-08 14:09:45 +00009016 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009017 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009018 ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
Vladimir Marko19a4d372016-12-08 14:41:46 +00009019 if (klass == nullptr) {
Ian Rogers9f1ab122011-12-12 08:52:43 -08009020 DCHECK(Thread::Current()->IsExceptionPending());
Andreas Gampe58a5af82014-07-31 16:23:49 -07009021 return nullptr;
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009022 }
9023
David Brazdil1ab0fa82018-05-04 11:28:03 +01009024 resolved = FindResolvedField(klass, dex_cache.Get(), class_loader.Get(), field_idx, is_static);
Andreas Gampe58a5af82014-07-31 16:23:49 -07009025 if (resolved == nullptr) {
Ian Rogers7b0c5b42012-02-16 15:29:07 -08009026 const char* name = dex_file.GetFieldName(field_id);
9027 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
David Brazdil8ce3bfa2018-03-12 18:01:18 +00009028 ThrowNoSuchFieldError(is_static ? "static " : "instance ", klass, type, name);
David Brazdil8ce3bfa2018-03-12 18:01:18 +00009029 }
Ian Rogersb067ac22011-12-13 18:05:09 -08009030 return resolved;
9031}
9032
Vladimir Markoe11dd502017-12-08 14:09:45 +00009033ArtField* ClassLinker::ResolveFieldJLS(uint32_t field_idx,
Mathieu Chartierc7853442015-03-27 14:35:38 -07009034 Handle<mirror::DexCache> dex_cache,
9035 Handle<mirror::ClassLoader> class_loader) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08009036 DCHECK(dex_cache != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07009037 ArtField* resolved = dex_cache->GetResolvedField(field_idx, image_pointer_size_);
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07009038 Thread::PoisonObjectPointersIfDebug();
Andreas Gampe58a5af82014-07-31 16:23:49 -07009039 if (resolved != nullptr) {
Ian Rogersb067ac22011-12-13 18:05:09 -08009040 return resolved;
9041 }
Vladimir Markoe11dd502017-12-08 14:09:45 +00009042 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009043 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009044 ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
Vladimir Marko19a4d372016-12-08 14:41:46 +00009045 if (klass == nullptr) {
Ian Rogersb067ac22011-12-13 18:05:09 -08009046 DCHECK(Thread::Current()->IsExceptionPending());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009047 return nullptr;
Ian Rogersb067ac22011-12-13 18:05:09 -08009048 }
9049
David Brazdil1ab0fa82018-05-04 11:28:03 +01009050 resolved = FindResolvedFieldJLS(klass, dex_cache.Get(), class_loader.Get(), field_idx);
9051 if (resolved == nullptr) {
9052 const char* name = dex_file.GetFieldName(field_id);
9053 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
Vladimir Marko19a4d372016-12-08 14:41:46 +00009054 ThrowNoSuchFieldError("", klass, type, name);
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009055 }
9056 return resolved;
Carl Shapiro5fafe2b2011-07-09 15:34:41 -07009057}
9058
David Brazdil1ab0fa82018-05-04 11:28:03 +01009059ArtField* ClassLinker::FindResolvedField(ObjPtr<mirror::Class> klass,
9060 ObjPtr<mirror::DexCache> dex_cache,
9061 ObjPtr<mirror::ClassLoader> class_loader,
9062 uint32_t field_idx,
9063 bool is_static) {
9064 ArtField* resolved = nullptr;
9065 Thread* self = is_static ? Thread::Current() : nullptr;
9066 const DexFile& dex_file = *dex_cache->GetDexFile();
9067
9068 resolved = is_static ? mirror::Class::FindStaticField(self, klass, dex_cache, field_idx)
9069 : klass->FindInstanceField(dex_cache, field_idx);
9070
9071 if (resolved == nullptr) {
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009072 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009073 const char* name = dex_file.GetFieldName(field_id);
9074 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
9075 resolved = is_static ? mirror::Class::FindStaticField(self, klass, name, type)
9076 : klass->FindInstanceField(name, type);
9077 }
9078
9079 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01009080 hiddenapi::ShouldDenyAccessToMember(resolved,
9081 hiddenapi::AccessContext(class_loader, dex_cache),
9082 hiddenapi::AccessMethod::kLinking)) {
David Brazdil1ab0fa82018-05-04 11:28:03 +01009083 resolved = nullptr;
9084 }
9085
9086 if (resolved != nullptr) {
9087 dex_cache->SetResolvedField(field_idx, resolved, image_pointer_size_);
9088 }
9089
9090 return resolved;
9091}
9092
9093ArtField* ClassLinker::FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,
9094 ObjPtr<mirror::DexCache> dex_cache,
9095 ObjPtr<mirror::ClassLoader> class_loader,
9096 uint32_t field_idx) {
9097 ArtField* resolved = nullptr;
9098 Thread* self = Thread::Current();
9099 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009100 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009101
9102 const char* name = dex_file.GetFieldName(field_id);
9103 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
9104 resolved = mirror::Class::FindField(self, klass, name, type);
9105
9106 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01009107 hiddenapi::ShouldDenyAccessToMember(resolved,
9108 hiddenapi::AccessContext(class_loader, dex_cache),
9109 hiddenapi::AccessMethod::kLinking)) {
David Brazdil1ab0fa82018-05-04 11:28:03 +01009110 resolved = nullptr;
9111 }
9112
9113 if (resolved != nullptr) {
9114 dex_cache->SetResolvedField(field_idx, resolved, image_pointer_size_);
9115 }
9116
9117 return resolved;
9118}
9119
Vladimir Markoaf940202017-12-08 15:01:18 +00009120ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
9121 Thread* self,
Orion Hodson06d10a72018-05-14 08:53:38 +01009122 dex::ProtoIndex proto_idx,
Vladimir Markoaf940202017-12-08 15:01:18 +00009123 Handle<mirror::DexCache> dex_cache,
9124 Handle<mirror::ClassLoader> class_loader) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009125 DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
Andreas Gampefa4333d2017-02-14 11:10:34 -08009126 DCHECK(dex_cache != nullptr);
Narayan Kamath25352fc2016-08-03 12:46:58 +01009127
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009128 ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
Narayan Kamath25352fc2016-08-03 12:46:58 +01009129 if (resolved != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01009130 return resolved;
Narayan Kamath25352fc2016-08-03 12:46:58 +01009131 }
9132
Narayan Kamath25352fc2016-08-03 12:46:58 +01009133 StackHandleScope<4> hs(self);
9134
9135 // First resolve the return type.
Vladimir Markoaf940202017-12-08 15:01:18 +00009136 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009137 const dex::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
Narayan Kamath25352fc2016-08-03 12:46:58 +01009138 Handle<mirror::Class> return_type(hs.NewHandle(
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009139 ResolveType(proto_id.return_type_idx_, dex_cache, class_loader)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009140 if (return_type == nullptr) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009141 DCHECK(self->IsExceptionPending());
9142 return nullptr;
9143 }
9144
9145 // Then resolve the argument types.
9146 //
9147 // TODO: Is there a better way to figure out the number of method arguments
9148 // other than by looking at the shorty ?
9149 const size_t num_method_args = strlen(dex_file.StringDataByIdx(proto_id.shorty_idx_)) - 1;
9150
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009151 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Narayan Kamath25352fc2016-08-03 12:46:58 +01009152 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9153 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_method_args)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009154 if (method_params == nullptr) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009155 DCHECK(self->IsExceptionPending());
9156 return nullptr;
9157 }
9158
9159 DexFileParameterIterator it(dex_file, proto_id);
9160 int32_t i = 0;
9161 MutableHandle<mirror::Class> param_class = hs.NewHandle<mirror::Class>(nullptr);
9162 for (; it.HasNext(); it.Next()) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08009163 const dex::TypeIndex type_idx = it.GetTypeIdx();
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009164 param_class.Assign(ResolveType(type_idx, dex_cache, class_loader));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009165 if (param_class == nullptr) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009166 DCHECK(self->IsExceptionPending());
9167 return nullptr;
9168 }
9169
9170 method_params->Set(i++, param_class.Get());
9171 }
9172
9173 DCHECK(!it.HasNext());
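  // Illustrative result (hypothetical proto): for (IJLjava/lang/String;)Z the loop above
  // collected {int, long, String}, and the MethodType created below is
  // (int, long, String) -> boolean.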
9174
9175 Handle<mirror::MethodType> type = hs.NewHandle(
9176 mirror::MethodType::Create(self, return_type, method_params));
9177 dex_cache->SetResolvedMethodType(proto_idx, type.Get());
9178
9179 return type.Get();
9180}
9181
Vladimir Markoaf940202017-12-08 15:01:18 +00009182ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(Thread* self,
Orion Hodson06d10a72018-05-14 08:53:38 +01009183 dex::ProtoIndex proto_idx,
Vladimir Markoaf940202017-12-08 15:01:18 +00009184 ArtMethod* referrer) {
Orion Hodson2e599942017-09-22 16:17:41 +01009185 StackHandleScope<2> hs(self);
Orion Hodson2e599942017-09-22 16:17:41 +01009186 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
9187 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
Vladimir Markoaf940202017-12-08 15:01:18 +00009188 return ResolveMethodType(self, proto_idx, dex_cache, class_loader);
Orion Hodson2e599942017-09-22 16:17:41 +01009189}
9190
Vladimir Marko5aead702019-03-27 11:00:36 +00009191ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForField(
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009192 Thread* self,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009193 const dex::MethodHandleItem& method_handle,
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009194 ArtMethod* referrer) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009195 DexFile::MethodHandleType handle_type =
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009196 static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
9197 mirror::MethodHandle::Kind kind;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009198 bool is_put;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009199 bool is_static;
9200 int32_t num_params;
Orion Hodsonc069a302017-01-18 09:23:12 +00009201 switch (handle_type) {
9202 case DexFile::MethodHandleType::kStaticPut: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009203 kind = mirror::MethodHandle::Kind::kStaticPut;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009204 is_put = true;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009205 is_static = true;
9206 num_params = 1;
Orion Hodson631827d2017-04-10 14:53:47 +01009207 break;
9208 }
9209 case DexFile::MethodHandleType::kStaticGet: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009210 kind = mirror::MethodHandle::Kind::kStaticGet;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009211 is_put = false;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009212 is_static = true;
9213 num_params = 0;
Orion Hodson631827d2017-04-10 14:53:47 +01009214 break;
9215 }
9216 case DexFile::MethodHandleType::kInstancePut: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009217 kind = mirror::MethodHandle::Kind::kInstancePut;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009218 is_put = true;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009219 is_static = false;
Orion Hodsonc069a302017-01-18 09:23:12 +00009220 num_params = 2;
9221 break;
9222 }
9223 case DexFile::MethodHandleType::kInstanceGet: {
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009224 kind = mirror::MethodHandle::Kind::kInstanceGet;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009225 is_put = false;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009226 is_static = false;
Orion Hodsonc069a302017-01-18 09:23:12 +00009227 num_params = 1;
9228 break;
9229 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009230 case DexFile::MethodHandleType::kInvokeStatic:
Orion Hodson82b351f2017-07-05 14:34:25 +01009231 case DexFile::MethodHandleType::kInvokeInstance:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009232 case DexFile::MethodHandleType::kInvokeConstructor:
Orion Hodson82b351f2017-07-05 14:34:25 +01009233 case DexFile::MethodHandleType::kInvokeDirect:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009234 case DexFile::MethodHandleType::kInvokeInterface:
9235 UNREACHABLE();
Orion Hodsonc069a302017-01-18 09:23:12 +00009236 }
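  // Illustrative method types for a hypothetical field `int Foo.x` (built by the second switch
  // below): kStaticGet -> ()int, kStaticPut -> (int)void, kInstanceGet -> (Foo)int,
  // kInstancePut -> (Foo, int)void; num_params above matches these parameter counts.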
9237
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009238 ArtField* target_field =
9239 ResolveField(method_handle.field_or_method_idx_, referrer, is_static);
9240 if (LIKELY(target_field != nullptr)) {
9241 ObjPtr<mirror::Class> target_class = target_field->GetDeclaringClass();
9242 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9243 if (UNLIKELY(!referring_class->CanAccessMember(target_class, target_field->GetAccessFlags()))) {
9244 ThrowIllegalAccessErrorField(referring_class, target_field);
9245 return nullptr;
9246 }
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009247 if (UNLIKELY(is_put && target_field->IsFinal())) {
9248 ThrowIllegalAccessErrorField(referring_class, target_field);
9249 return nullptr;
9250 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009251 } else {
9252 DCHECK(Thread::Current()->IsExceptionPending());
9253 return nullptr;
9254 }
9255
9256 StackHandleScope<4> hs(self);
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009257 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Orion Hodsonc069a302017-01-18 09:23:12 +00009258 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9259 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009260 if (UNLIKELY(method_params == nullptr)) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009261 DCHECK(self->IsExceptionPending());
9262 return nullptr;
9263 }
9264
Orion Hodsonc069a302017-01-18 09:23:12 +00009266 Handle<mirror::Class> return_type;
9267 switch (handle_type) {
9268 case DexFile::MethodHandleType::kStaticPut: {
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009269 method_params->Set(0, target_field->ResolveType());
Vladimir Marko9186b182018-11-06 14:55:54 +00009270 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
Orion Hodsonc069a302017-01-18 09:23:12 +00009271 break;
9272 }
9273 case DexFile::MethodHandleType::kStaticGet: {
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009274 return_type = hs.NewHandle(target_field->ResolveType());
Orion Hodsonc069a302017-01-18 09:23:12 +00009275 break;
9276 }
9277 case DexFile::MethodHandleType::kInstancePut: {
Orion Hodson631827d2017-04-10 14:53:47 +01009278 method_params->Set(0, target_field->GetDeclaringClass());
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009279 method_params->Set(1, target_field->ResolveType());
Vladimir Marko9186b182018-11-06 14:55:54 +00009280 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
Orion Hodsonc069a302017-01-18 09:23:12 +00009281 break;
9282 }
9283 case DexFile::MethodHandleType::kInstanceGet: {
Orion Hodson631827d2017-04-10 14:53:47 +01009284 method_params->Set(0, target_field->GetDeclaringClass());
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009285 return_type = hs.NewHandle(target_field->ResolveType());
Orion Hodsonc069a302017-01-18 09:23:12 +00009286 break;
9287 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009288 case DexFile::MethodHandleType::kInvokeStatic:
Orion Hodson631827d2017-04-10 14:53:47 +01009289 case DexFile::MethodHandleType::kInvokeInstance:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009290 case DexFile::MethodHandleType::kInvokeConstructor:
9291 case DexFile::MethodHandleType::kInvokeDirect:
Orion Hodson631827d2017-04-10 14:53:47 +01009292 case DexFile::MethodHandleType::kInvokeInterface:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009293 UNREACHABLE();
9294 }
9295
9296 for (int32_t i = 0; i < num_params; ++i) {
9297 if (UNLIKELY(method_params->Get(i) == nullptr)) {
9298 DCHECK(self->IsExceptionPending());
9299 return nullptr;
Orion Hodsonc069a302017-01-18 09:23:12 +00009300 }
9301 }
9302
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009303 if (UNLIKELY(return_type.IsNull())) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009304 DCHECK(self->IsExceptionPending());
9305 return nullptr;
9306 }
9307
9308 Handle<mirror::MethodType>
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009309 method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9310 if (UNLIKELY(method_type.IsNull())) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009311 DCHECK(self->IsExceptionPending());
9312 return nullptr;
9313 }
Orion Hodson631827d2017-04-10 14:53:47 +01009314
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009315 uintptr_t target = reinterpret_cast<uintptr_t>(target_field);
9316 return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9317}
9318
Vladimir Marko5aead702019-03-27 11:00:36 +00009319ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForMethod(
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009320 Thread* self,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009321 const dex::MethodHandleItem& method_handle,
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009322 ArtMethod* referrer) {
9323 DexFile::MethodHandleType handle_type =
9324 static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
9325 mirror::MethodHandle::Kind kind;
9326 uint32_t receiver_count = 0;
9327 ArtMethod* target_method = nullptr;
9328 switch (handle_type) {
9329 case DexFile::MethodHandleType::kStaticPut:
9330 case DexFile::MethodHandleType::kStaticGet:
9331 case DexFile::MethodHandleType::kInstancePut:
9332 case DexFile::MethodHandleType::kInstanceGet:
9333 UNREACHABLE();
9334 case DexFile::MethodHandleType::kInvokeStatic: {
9335 kind = mirror::MethodHandle::Kind::kInvokeStatic;
9336 receiver_count = 0;
Vladimir Markoba118822017-06-12 15:41:56 +01009337 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9338 method_handle.field_or_method_idx_,
9339 referrer,
9340 InvokeType::kStatic);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009341 break;
9342 }
9343 case DexFile::MethodHandleType::kInvokeInstance: {
9344 kind = mirror::MethodHandle::Kind::kInvokeVirtual;
9345 receiver_count = 1;
Vladimir Markoba118822017-06-12 15:41:56 +01009346 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9347 method_handle.field_or_method_idx_,
9348 referrer,
9349 InvokeType::kVirtual);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009350 break;
9351 }
9352 case DexFile::MethodHandleType::kInvokeConstructor: {
9353 // Constructors are currently implemented as a transform. They
9354 // are special cased later in this method.
9355 kind = mirror::MethodHandle::Kind::kInvokeTransform;
9356 receiver_count = 0;
Vladimir Markoba118822017-06-12 15:41:56 +01009357 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9358 method_handle.field_or_method_idx_,
9359 referrer,
9360 InvokeType::kDirect);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009361 break;
9362 }
9363 case DexFile::MethodHandleType::kInvokeDirect: {
9364 kind = mirror::MethodHandle::Kind::kInvokeDirect;
9365 receiver_count = 1;
9366 StackHandleScope<2> hs(self);
9367 // A constant method handle with type kInvokeDirect can refer to
9368 // a method that is private or to a method in a super class. To
9369 // disambiguate the two options, we resolve the method ignoring
9370 // the invocation type to determine if the method is private. We
9371 // then resolve again specifying the intended invocation type to
9372 // force the appropriate checks.
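      // For example (hypothetical classes): a handle naming a private Foo.helper() keeps
      // kInvokeDirect, while one naming an accessible superclass method is re-resolved as
      // kInvokeSuper below.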
Vladimir Marko89011192017-12-11 13:45:05 +00009373 target_method = ResolveMethodWithoutInvokeType(method_handle.field_or_method_idx_,
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009374 hs.NewHandle(referrer->GetDexCache()),
9375 hs.NewHandle(referrer->GetClassLoader()));
9376 if (UNLIKELY(target_method == nullptr)) {
9377 break;
9378 }
9379
9380 if (target_method->IsPrivate()) {
9381 kind = mirror::MethodHandle::Kind::kInvokeDirect;
Vladimir Markoba118822017-06-12 15:41:56 +01009382 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9383 method_handle.field_or_method_idx_,
9384 referrer,
9385 InvokeType::kDirect);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009386 } else {
9387 kind = mirror::MethodHandle::Kind::kInvokeSuper;
Vladimir Markoba118822017-06-12 15:41:56 +01009388 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9389 method_handle.field_or_method_idx_,
9390 referrer,
9391 InvokeType::kSuper);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009392 if (UNLIKELY(target_method == nullptr)) {
9393 break;
9394 }
9395 // Find the method specified in the parent in referring class
9396 // so invoke-super invokes the method in the parent of the
9397 // referrer.
9398 target_method =
9399 referrer->GetDeclaringClass()->FindVirtualMethodForVirtual(target_method,
9400 kRuntimePointerSize);
9401 }
9402 break;
9403 }
9404 case DexFile::MethodHandleType::kInvokeInterface: {
9405 kind = mirror::MethodHandle::Kind::kInvokeInterface;
9406 receiver_count = 1;
Vladimir Markoba118822017-06-12 15:41:56 +01009407 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9408 method_handle.field_or_method_idx_,
9409 referrer,
9410 InvokeType::kInterface);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009411 break;
9412 }
Orion Hodson631827d2017-04-10 14:53:47 +01009413 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009414
9415 if (UNLIKELY(target_method == nullptr)) {
9416 DCHECK(Thread::Current()->IsExceptionPending());
9417 return nullptr;
9418 }
9419
9420 ObjPtr<mirror::Class> target_class = target_method->GetDeclaringClass();
9421 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9422 uint32_t access_flags = target_method->GetAccessFlags();
9423 if (UNLIKELY(!referring_class->CanAccessMember(target_class, access_flags))) {
9424 ThrowIllegalAccessErrorMethod(referring_class, target_method);
9425 return nullptr;
9426 }
9427
9428 // Calculate the number of parameters from the method shorty. We add the
9429 // receiver count (0 or 1) and deduct one for the return value.
9430 uint32_t shorty_length;
9431 target_method->GetShorty(&shorty_length);
9432 int32_t num_params = static_cast<int32_t>(shorty_length + receiver_count - 1);
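  // Worked example (hypothetical method): shorty "LIJ" (reference return, int and long
  // arguments) gives shorty_length == 3; with receiver_count == 1 for an instance invoke,
  // num_params == 3 + 1 - 1 == 3 (receiver, int, long).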
9433
Orion Hodsonecd58562018-09-24 11:27:33 +01009434 StackHandleScope<5> hs(self);
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009435 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009436 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9437 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
9438 if (method_params.Get() == nullptr) {
9439 DCHECK(self->IsExceptionPending());
9440 return nullptr;
9441 }
9442
Orion Hodsonecd58562018-09-24 11:27:33 +01009443 const DexFile* dex_file = referrer->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009444 const dex::MethodId& method_id = dex_file->GetMethodId(method_handle.field_or_method_idx_);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009445 int32_t index = 0;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009446 if (receiver_count != 0) {
Orion Hodsonecd58562018-09-24 11:27:33 +01009447 // Insert receiver. Use the class identified in the method handle rather than the declaring
9448    // class of the resolved method, which may be a super class or a default interface method
9449 // (b/115964401).
9450 ObjPtr<mirror::Class> receiver_class = LookupResolvedType(method_id.class_idx_, referrer);
9451 // receiver_class should have been resolved when resolving the target method.
9452 DCHECK(receiver_class != nullptr);
9453 method_params->Set(index++, receiver_class);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009454 }
Orion Hodsonecd58562018-09-24 11:27:33 +01009455
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009456 const dex::ProtoId& proto_id = dex_file->GetProtoId(method_id.proto_idx_);
Orion Hodsonecd58562018-09-24 11:27:33 +01009457 DexFileParameterIterator it(*dex_file, proto_id);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009458 while (it.HasNext()) {
Orion Hodsonda1cdd02018-01-31 18:08:28 +00009459 DCHECK_LT(index, num_params);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009460 const dex::TypeIndex type_idx = it.GetTypeIdx();
Orion Hodsonecd58562018-09-24 11:27:33 +01009461 ObjPtr<mirror::Class> klass = ResolveType(type_idx, referrer);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009462    if (klass == nullptr) {
9463 DCHECK(self->IsExceptionPending());
9464 return nullptr;
9465 }
9466 method_params->Set(index++, klass);
9467 it.Next();
9468 }
9469
Orion Hodsonecd58562018-09-24 11:27:33 +01009470 Handle<mirror::Class> return_type =
9471 hs.NewHandle(ResolveType(proto_id.return_type_idx_, referrer));
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009472 if (UNLIKELY(return_type.IsNull())) {
9473 DCHECK(self->IsExceptionPending());
9474 return nullptr;
9475 }
9476
9477 Handle<mirror::MethodType>
9478 method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9479 if (UNLIKELY(method_type.IsNull())) {
9480 DCHECK(self->IsExceptionPending());
9481 return nullptr;
9482 }
9483
9484 if (UNLIKELY(handle_type == DexFile::MethodHandleType::kInvokeConstructor)) {
9485 Handle<mirror::Class> constructor_class = hs.NewHandle(target_method->GetDeclaringClass());
9486 Handle<mirror::MethodHandlesLookup> lookup =
9487 hs.NewHandle(mirror::MethodHandlesLookup::GetDefault(self));
9488 return lookup->FindConstructor(self, constructor_class, method_type);
9489 }
9490
9491 uintptr_t target = reinterpret_cast<uintptr_t>(target_method);
9492 return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9493}
9494
Vladimir Markoaf940202017-12-08 15:01:18 +00009495ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandle(Thread* self,
9496 uint32_t method_handle_idx,
9497 ArtMethod* referrer)
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009498 REQUIRES_SHARED(Locks::mutator_lock_) {
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009499 const DexFile* const dex_file = referrer->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009500 const dex::MethodHandleItem& method_handle = dex_file->GetMethodHandle(method_handle_idx);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009501 switch (static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_)) {
9502 case DexFile::MethodHandleType::kStaticPut:
9503 case DexFile::MethodHandleType::kStaticGet:
9504 case DexFile::MethodHandleType::kInstancePut:
9505 case DexFile::MethodHandleType::kInstanceGet:
9506 return ResolveMethodHandleForField(self, method_handle, referrer);
9507 case DexFile::MethodHandleType::kInvokeStatic:
9508 case DexFile::MethodHandleType::kInvokeInstance:
9509 case DexFile::MethodHandleType::kInvokeConstructor:
9510 case DexFile::MethodHandleType::kInvokeDirect:
9511 case DexFile::MethodHandleType::kInvokeInterface:
Orion Hodsonda1cdd02018-01-31 18:08:28 +00009512 return ResolveMethodHandleForMethod(self, method_handle, referrer);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009513 }
Orion Hodsonc069a302017-01-18 09:23:12 +00009514}
9515
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009516bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
9517 return (entry_point == GetQuickResolutionStub()) ||
9518 (quick_resolution_trampoline_ == entry_point);
9519}
9520
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009521bool ClassLinker::IsQuickToInterpreterBridge(const void* entry_point) const {
9522 return (entry_point == GetQuickToInterpreterBridge()) ||
9523 (quick_to_interpreter_bridge_trampoline_ == entry_point);
9524}
9525
9526bool ClassLinker::IsQuickGenericJniStub(const void* entry_point) const {
9527 return (entry_point == GetQuickGenericJniStub()) ||
9528 (quick_generic_jni_trampoline_ == entry_point);
9529}
9530
David Sehra49e0532017-08-25 08:05:29 -07009531bool ClassLinker::IsJniDlsymLookupStub(const void* entry_point) const {
Vladimir Marko7dac8642019-11-06 17:09:30 +00009532 return entry_point == GetJniDlsymLookupStub() ||
9533 (jni_dlsym_lookup_trampoline_ == entry_point);
David Sehra49e0532017-08-25 08:05:29 -07009534}
9535
Vladimir Markofa458ac2020-02-12 14:08:07 +00009536bool ClassLinker::IsJniDlsymLookupCriticalStub(const void* entry_point) const {
9537 return entry_point == GetJniDlsymLookupCriticalStub() ||
9538 (jni_dlsym_lookup_critical_trampoline_ == entry_point);
9539}
9540
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009541const void* ClassLinker::GetRuntimeQuickGenericJniStub() const {
9542 return GetQuickGenericJniStub();
9543}
9544
Mathieu Chartiere401d142015-04-22 13:56:20 -07009545void ClassLinker::SetEntryPointsToInterpreter(ArtMethod* method) const {
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009546 if (!method->IsNative()) {
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009547 method->SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
9548 } else {
Goran Jakovljevicc16268f2017-07-27 10:03:32 +02009549 method->SetEntryPointFromQuickCompiledCode(GetQuickGenericJniStub());
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009550 }
9551}
9552
Alex Lightdb01a092017-04-03 15:39:55 -07009553void ClassLinker::SetEntryPointsForObsoleteMethod(ArtMethod* method) const {
9554 DCHECK(method->IsObsolete());
9555 // We cannot mess with the entrypoints of native methods because they are used to determine how
9556 // large the method's quick stack frame is. Without this information we cannot walk the stacks.
9557 if (!method->IsNative()) {
9558 method->SetEntryPointFromQuickCompiledCode(GetInvokeObsoleteMethodStub());
9559 }
9560}
9561
Ian Rogers7dfb28c2013-08-22 08:18:36 -07009562void ClassLinker::DumpForSigQuit(std::ostream& os) {
Mathieu Chartier6b069532015-08-05 15:08:12 -07009563 ScopedObjectAccess soa(Thread::Current());
Mathieu Chartier6b069532015-08-05 15:08:12 -07009564 ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009565 os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
9566 << NumNonZygoteClasses() << "\n";
Nicolas Geoffraya90c9222018-09-07 13:19:19 +01009567 ReaderMutexLock mu2(soa.Self(), *Locks::dex_lock_);
9568 os << "Dumping registered class loaders\n";
9569 size_t class_loader_index = 0;
9570 for (const ClassLoaderData& class_loader : class_loaders_) {
9571 ObjPtr<mirror::ClassLoader> loader =
9572 ObjPtr<mirror::ClassLoader>::DownCast(soa.Self()->DecodeJObject(class_loader.weak_root));
9573 if (loader != nullptr) {
9574 os << "#" << class_loader_index++ << " " << loader->GetClass()->PrettyDescriptor() << ": [";
9575 bool saw_one_dex_file = false;
9576 for (const DexCacheData& dex_cache : dex_caches_) {
9577 if (dex_cache.IsValid() && dex_cache.class_table == class_loader.class_table) {
9578 if (saw_one_dex_file) {
9579 os << ":";
9580 }
9581 saw_one_dex_file = true;
9582 os << dex_cache.dex_file->GetLocation();
9583 }
9584 }
9585 os << "]";
9586 bool found_parent = false;
9587 if (loader->GetParent() != nullptr) {
9588 size_t parent_index = 0;
9589 for (const ClassLoaderData& class_loader2 : class_loaders_) {
9590 ObjPtr<mirror::ClassLoader> loader2 = ObjPtr<mirror::ClassLoader>::DownCast(
9591 soa.Self()->DecodeJObject(class_loader2.weak_root));
9592 if (loader2 == loader->GetParent()) {
9593 os << ", parent #" << parent_index;
9594 found_parent = true;
9595 break;
9596 }
9597 parent_index++;
9598 }
9599 if (!found_parent) {
9600 os << ", unregistered parent of type "
9601 << loader->GetParent()->GetClass()->PrettyDescriptor();
9602 }
9603 } else {
9604 os << ", no parent";
9605 }
9606 os << "\n";
9607 }
9608 }
9609 os << "Done dumping class loaders\n";
Andreas Gampe9b7f8b52019-06-07 08:59:29 -07009610 Runtime* runtime = Runtime::Current();
9611 os << "Classes initialized: " << runtime->GetStat(KIND_GLOBAL_CLASS_INIT_COUNT) << " in "
9612 << PrettyDuration(runtime->GetStat(KIND_GLOBAL_CLASS_INIT_TIME)) << "\n";
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009613}
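// Illustrative SIGQUIT dump produced by the method above (hypothetical loaders, paths and
// counts):
//   Zygote loaded classes=4230 post zygote classes=187
//   Dumping registered class loaders
//   #0 dalvik.system.PathClassLoader: [/system/framework/org.apache.http.legacy.jar], parent #1
//   #1 dalvik.system.PathClassLoader: [/data/app/com.example/base.apk], unregistered parent of type java.lang.BootClassLoader
//   Done dumping class loaders
//   Classes initialized: 142 in 5.2ms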
9614
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009615class CountClassesVisitor : public ClassLoaderVisitor {
9616 public:
9617 CountClassesVisitor() : num_zygote_classes(0), num_non_zygote_classes(0) {}
9618
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009619 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01009620 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009621 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier6b069532015-08-05 15:08:12 -07009622 if (class_table != nullptr) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00009623 num_zygote_classes += class_table->NumZygoteClasses(class_loader);
9624 num_non_zygote_classes += class_table->NumNonZygoteClasses(class_loader);
Mathieu Chartier6b069532015-08-05 15:08:12 -07009625 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009626 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009627
9628 size_t num_zygote_classes;
9629 size_t num_non_zygote_classes;
9630};
9631
9632size_t ClassLinker::NumZygoteClasses() const {
9633 CountClassesVisitor visitor;
9634 VisitClassLoaders(&visitor);
Andreas Gampe2af99022017-04-25 08:32:59 -07009635 return visitor.num_zygote_classes + boot_class_table_->NumZygoteClasses(nullptr);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009636}
9637
9638size_t ClassLinker::NumNonZygoteClasses() const {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009639 CountClassesVisitor visitor;
9640 VisitClassLoaders(&visitor);
Andreas Gampe2af99022017-04-25 08:32:59 -07009641 return visitor.num_non_zygote_classes + boot_class_table_->NumNonZygoteClasses(nullptr);
Elliott Hughescac6cc72011-11-03 20:31:21 -07009642}
9643
Ian Rogers7dfb28c2013-08-22 08:18:36 -07009644size_t ClassLinker::NumLoadedClasses() {
Ian Rogers1bf8d4d2013-05-30 00:18:49 -07009645 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Mathieu Chartierc2e20622014-11-03 11:41:47 -08009646  // Only return non-zygote classes since these are the ones the apps care about.
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009647 return NumNonZygoteClasses();
Elliott Hughese27955c2011-08-26 15:21:24 -07009648}
9649
Brian Carlstrom47d237a2011-10-18 15:08:33 -07009650pid_t ClassLinker::GetClassesLockOwner() {
Ian Rogersb726dcb2012-09-05 08:57:23 -07009651 return Locks::classlinker_classes_lock_->GetExclusiveOwnerTid();
Brian Carlstrom47d237a2011-10-18 15:08:33 -07009652}
9653
9654pid_t ClassLinker::GetDexLockOwner() {
Andreas Gampecc1b5352016-12-01 16:58:38 -08009655 return Locks::dex_lock_->GetExclusiveOwnerTid();
Brian Carlstrom24a3c2e2011-10-17 18:07:52 -07009656}
9657
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009658void ClassLinker::SetClassRoot(ClassRoot class_root, ObjPtr<mirror::Class> klass) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08009659 DCHECK(!init_done_);
9660
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009661 DCHECK(klass != nullptr);
9662 DCHECK(klass->GetClassLoader() == nullptr);
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08009663
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -07009664 mirror::ObjectArray<mirror::Class>* class_roots = class_roots_.Read();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009665 DCHECK(class_roots != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01009666 DCHECK_LT(static_cast<uint32_t>(class_root), static_cast<uint32_t>(ClassRoot::kMax));
9667 int32_t index = static_cast<int32_t>(class_root);
9668 DCHECK(class_roots->Get(index) == nullptr);
9669 class_roots->Set<false>(index, klass);
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009670}
9671
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009672ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
9673 Thread* self,
9674 const std::vector<const DexFile*>& dex_files,
9675 Handle<mirror::Class> loader_class,
Nicolas Geoffraye1672732018-11-30 01:09:49 +00009676 Handle<mirror::ClassLoader> parent_loader,
9677 Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries) {
Calin Juravle7865ac72017-06-28 11:03:12 -07009678
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009679 StackHandleScope<5> hs(self);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009680
Mathieu Chartierc7853442015-03-27 14:35:38 -07009681 ArtField* dex_elements_field =
Andreas Gampe08883de2016-11-08 13:20:52 -08009682 jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList_dexElements);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009683
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009684 Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009685 DCHECK(dex_elements_class != nullptr);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009686 DCHECK(dex_elements_class->IsArrayClass());
Mathieu Chartierdaaf3262015-03-24 13:30:28 -07009687 Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
Mathieu Chartier3398c782016-09-30 10:27:43 -07009688 mirror::ObjectArray<mirror::Object>::Alloc(self,
9689 dex_elements_class.Get(),
9690 dex_files.size())));
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009691 Handle<mirror::Class> h_dex_element_class =
9692 hs.NewHandle(dex_elements_class->GetComponentType());
9693
Mathieu Chartierc7853442015-03-27 14:35:38 -07009694 ArtField* element_file_field =
Andreas Gampe08883de2016-11-08 13:20:52 -08009695 jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList__Element_dexFile);
Mathieu Chartierc7853442015-03-27 14:35:38 -07009696 DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009697
Andreas Gampe08883de2016-11-08 13:20:52 -08009698 ArtField* cookie_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
Vladimir Marko208f6702017-12-08 12:00:50 +00009699 DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009700
Andreas Gampe08883de2016-11-08 13:20:52 -08009701 ArtField* file_name_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_fileName);
Vladimir Marko208f6702017-12-08 12:00:50 +00009702 DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
Mathieu Chartierfbc31082016-01-24 11:59:56 -08009703
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009704 // Fill the elements array.
9705 int32_t index = 0;
9706 for (const DexFile* dex_file : dex_files) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08009707 StackHandleScope<4> hs2(self);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009708
Calin Juravle7865ac72017-06-28 11:03:12 -07009709    // CreateWellKnownClassLoader is only used by gtests and the compiler.
9710 // Index 0 of h_long_array is supposed to be the oat file but we can leave it null.
Mathieu Chartiere58991b2015-10-13 07:59:34 -07009711 Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
9712 self,
9713 kDexFileIndexStart + 1));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009714 DCHECK(h_long_array != nullptr);
Vladimir Marko78baed52018-10-11 10:44:58 +01009715 h_long_array->Set(kDexFileIndexStart, reinterpret_cast64<int64_t>(dex_file));
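    // Resulting cookie layout (assuming kDexFileIndexStart == 1): long[0] is the slot reserved
    // for the OatFile (left at 0 here) and long[1] holds the native DexFile pointer set above.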
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009716
Mathieu Chartier3738e982017-05-12 16:07:28 -07009717 // Note that this creates a finalizable dalvik.system.DexFile object and a corresponding
9718 // FinalizerReference which will never get cleaned up without a started runtime.
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009719 Handle<mirror::Object> h_dex_file = hs2.NewHandle(
Mathieu Chartierc7853442015-03-27 14:35:38 -07009720 cookie_field->GetDeclaringClass()->AllocObject(self));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009721 DCHECK(h_dex_file != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07009722 cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009723
Mathieu Chartierfbc31082016-01-24 11:59:56 -08009724 Handle<mirror::String> h_file_name = hs2.NewHandle(
9725 mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009726 DCHECK(h_file_name != nullptr);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08009727 file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());
9728
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009729 Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009730 DCHECK(h_element != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07009731 element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009732
9733 h_dex_elements->Set(index, h_element.Get());
9734 index++;
9735 }
9736 DCHECK_EQ(index, h_dex_elements->GetLength());
9737
9738 // Create DexPathList.
9739 Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
Mathieu Chartierc7853442015-03-27 14:35:38 -07009740 dex_elements_field->GetDeclaringClass()->AllocObject(self));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009741 DCHECK(h_dex_path_list != nullptr);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009742 // Set elements.
Mathieu Chartierc7853442015-03-27 14:35:38 -07009743 dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
Andreas Gampe473191c2017-12-28 16:55:31 -08009744  // Create an empty List for the "nativeLibraryDirectories" field, required for native tests.
9745  // Note: this code is uncommon (oatdump) / testing-only, so don't add further WellKnownClasses
9746 // elements.
9747 {
9748 ArtField* native_lib_dirs = dex_elements_field->GetDeclaringClass()->
9749 FindDeclaredInstanceField("nativeLibraryDirectories", "Ljava/util/List;");
9750 DCHECK(native_lib_dirs != nullptr);
9751 ObjPtr<mirror::Class> list_class = FindSystemClass(self, "Ljava/util/ArrayList;");
9752 DCHECK(list_class != nullptr);
9753 {
9754 StackHandleScope<1> h_list_scope(self);
9755 Handle<mirror::Class> h_list_class(h_list_scope.NewHandle<mirror::Class>(list_class));
9756 bool list_init = EnsureInitialized(self, h_list_class, true, true);
9757 DCHECK(list_init);
9758 list_class = h_list_class.Get();
9759 }
9760 ObjPtr<mirror::Object> list_object = list_class->AllocObject(self);
9761 // Note: we leave the object uninitialized. This must never leak into any non-testing code, but
9762 // is fine for testing. While it violates a Java-code invariant (the elementData field is
9763 // normally never null), as long as one does not try to add elements, this will still
9764 // work.
9765 native_lib_dirs->SetObject<false>(h_dex_path_list.Get(), list_object);
9766 }
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009767
Calin Juravle7865ac72017-06-28 11:03:12 -07009768  // Create the class loader.
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009769 Handle<mirror::ClassLoader> h_class_loader = hs.NewHandle<mirror::ClassLoader>(
9770 ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
Calin Juravle7865ac72017-06-28 11:03:12 -07009771 DCHECK(h_class_loader != nullptr);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009772 // Set DexPathList.
Mathieu Chartierc7853442015-03-27 14:35:38 -07009773 ArtField* path_list_field =
Andreas Gampe08883de2016-11-08 13:20:52 -08009774 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList);
Mathieu Chartierc7853442015-03-27 14:35:38 -07009775 DCHECK(path_list_field != nullptr);
Calin Juravle7865ac72017-06-28 11:03:12 -07009776 path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009777
9778 // Make a pretend boot-classpath.
9779 // TODO: Should we scan the image?
Mathieu Chartierc7853442015-03-27 14:35:38 -07009780 ArtField* const parent_field =
Vladimir Marko19a4d372016-12-08 14:41:46 +00009781 mirror::Class::FindField(self,
Calin Juravle7865ac72017-06-28 11:03:12 -07009782 h_class_loader->GetClass(),
Vladimir Marko19a4d372016-12-08 14:41:46 +00009783 "parent",
Mathieu Chartierc7853442015-03-27 14:35:38 -07009784 "Ljava/lang/ClassLoader;");
Roland Levillainf39c9eb2015-05-26 15:02:07 +01009785 DCHECK(parent_field != nullptr);
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009786 if (parent_loader.Get() == nullptr) {
9787 ScopedObjectAccessUnchecked soa(self);
9788 ObjPtr<mirror::Object> boot_loader(soa.Decode<mirror::Class>(
9789 WellKnownClasses::java_lang_BootClassLoader)->AllocObject(self));
9790 parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
9791 } else {
9792 parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
9793 }
Calin Juravle7865ac72017-06-28 11:03:12 -07009794
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009795 ArtField* shared_libraries_field =
9796 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
9797 DCHECK(shared_libraries_field != nullptr);
9798 shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());
9799
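  // At this point the loader's object graph looks roughly like (libcore field names,
  // illustrative): classLoader.pathList -> DexPathList { dexElements[i] -> Element.dexFile ->
  // a DexFile whose cookie long[] was set up above, nativeLibraryDirectories -> empty
  // ArrayList }, classLoader.parent -> the given parent or a new BootClassLoader, and
  // classLoader.sharedLibraryLoaders -> the given array (possibly null).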
9800 return h_class_loader.Get();
9801}
9802
9803jobject ClassLinker::CreateWellKnownClassLoader(Thread* self,
9804 const std::vector<const DexFile*>& dex_files,
9805 jclass loader_class,
Nicolas Geoffraye1672732018-11-30 01:09:49 +00009806 jobject parent_loader,
9807 jobject shared_libraries) {
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009808 CHECK(self->GetJniEnv()->IsSameObject(loader_class,
9809 WellKnownClasses::dalvik_system_PathClassLoader) ||
9810 self->GetJniEnv()->IsSameObject(loader_class,
David Brazdil1a9ac532019-03-05 11:57:13 +00009811 WellKnownClasses::dalvik_system_DelegateLastClassLoader) ||
9812 self->GetJniEnv()->IsSameObject(loader_class,
9813 WellKnownClasses::dalvik_system_InMemoryDexClassLoader));
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009814
9815 // SOAAlreadyRunnable is protected, and we need something to add a global reference.
9816 // We could move the jobject to the callers, but all call-sites do this...
9817 ScopedObjectAccessUnchecked soa(self);
9818
9819 // For now, create a libcore-level DexFile for each ART DexFile. This "explodes" multidex.
Nicolas Geoffraye1672732018-11-30 01:09:49 +00009820 StackHandleScope<4> hs(self);
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009821
9822 Handle<mirror::Class> h_loader_class =
9823 hs.NewHandle<mirror::Class>(soa.Decode<mirror::Class>(loader_class));
Nicolas Geoffraye1672732018-11-30 01:09:49 +00009824 Handle<mirror::ClassLoader> h_parent =
9825 hs.NewHandle<mirror::ClassLoader>(soa.Decode<mirror::ClassLoader>(parent_loader));
9826 Handle<mirror::ObjectArray<mirror::ClassLoader>> h_shared_libraries =
9827 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ClassLoader>>(shared_libraries));
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009828
9829 ObjPtr<mirror::ClassLoader> loader = CreateWellKnownClassLoader(
9830 self,
9831 dex_files,
9832 h_loader_class,
Nicolas Geoffraye1672732018-11-30 01:09:49 +00009833 h_parent,
9834 h_shared_libraries);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009835
9836 // Make it a global ref and return.
9837 ScopedLocalRef<jobject> local_ref(
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009838 soa.Env(), soa.Env()->AddLocalReference<jobject>(loader));
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009839 return soa.Env()->NewGlobalRef(local_ref.get());
9840}
9841
Calin Juravle7865ac72017-06-28 11:03:12 -07009842jobject ClassLinker::CreatePathClassLoader(Thread* self,
9843 const std::vector<const DexFile*>& dex_files) {
9844 return CreateWellKnownClassLoader(self,
9845 dex_files,
9846 WellKnownClasses::dalvik_system_PathClassLoader,
9847 nullptr);
9848}
9849
Andreas Gampe8ac75952015-06-02 21:01:45 -07009850void ClassLinker::DropFindArrayClassCache() {
9851 std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
9852 find_array_class_cache_next_victim_ = 0;
9853}
9854
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07009855void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009856 Thread* const self = Thread::Current();
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07009857 for (const ClassLoaderData& data : class_loaders_) {
Mathieu Chartier4843bd52015-10-01 17:08:44 -07009858 // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
Mathieu Chartierc4f39252016-10-05 18:32:08 -07009859 ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
9860 self->DecodeJObject(data.weak_root));
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009861 if (class_loader != nullptr) {
Vladimir Markod93e3742018-07-18 10:58:13 +01009862 visitor->Visit(class_loader);
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009863 }
9864 }
9865}
9866
Alexey Grebenkin252a4e42018-04-02 18:18:01 +03009867void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
9868 for (const ClassLoaderData& data : class_loaders_) {
9869 LinearAlloc* alloc = data.allocator;
9870 if (alloc != nullptr && !visitor->Visit(alloc)) {
9871 break;
9872 }
9873 }
9874}
9875
Mathieu Chartierbc5a7952016-10-17 15:46:31 -07009876void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
9877 ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier00310e02015-10-17 12:46:42 -07009878 DCHECK(dex_file != nullptr);
Mathieu Chartier00310e02015-10-17 12:46:42 -07009879 Thread* const self = Thread::Current();
9880 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Vladimir Markobcf17522018-06-01 13:14:32 +01009881 ClassTable* const table = ClassTableForClassLoader(class_loader);
Mathieu Chartier00310e02015-10-17 12:46:42 -07009882 DCHECK(table != nullptr);
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07009883 if (table->InsertStrongRoot(dex_file) && class_loader != nullptr) {
Mathieu Chartier00310e02015-10-17 12:46:42 -07009884    // It was not already inserted; perform the write barrier to let the GC know the class loader's
9885 // class table was modified.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07009886 WriteBarrier::ForEveryFieldWrite(class_loader);
Mathieu Chartier00310e02015-10-17 12:46:42 -07009887 }
9888}
9889
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07009890void ClassLinker::CleanupClassLoaders() {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009891 Thread* const self = Thread::Current();
Mathieu Chartier65975772016-08-05 10:46:36 -07009892 std::vector<ClassLoaderData> to_delete;
9893  // Do the deletion outside the lock to avoid a lock violation in the JIT code cache.
9894 {
9895 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
9896 for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
9897 const ClassLoaderData& data = *it;
9898 // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
Mathieu Chartierc4f39252016-10-05 18:32:08 -07009899 ObjPtr<mirror::ClassLoader> class_loader =
9900 ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
Mathieu Chartier65975772016-08-05 10:46:36 -07009901 if (class_loader != nullptr) {
9902 ++it;
9903 } else {
9904 VLOG(class_linker) << "Freeing class loader";
9905 to_delete.push_back(data);
9906 it = class_loaders_.erase(it);
9907 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009908 }
9909 }
Mathieu Chartier65975772016-08-05 10:46:36 -07009910 for (ClassLoaderData& data : to_delete) {
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03009911    // CHA unloading analysis and SingleImplementation cleanups are required.
Andreas Gampe98ea9d92018-10-19 14:06:15 -07009912 DeleteClassLoader(self, data, /*cleanup_cha=*/ true);
Mathieu Chartier65975772016-08-05 10:46:36 -07009913 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009914}
9915
Mathieu Chartier65975772016-08-05 10:46:36 -07009916class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
9917 public:
9918 FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
9919 : method_(method),
9920 pointer_size_(pointer_size) {}
9921
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01009922 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) override {
Mathieu Chartier65975772016-08-05 10:46:36 -07009923 if (klass->GetVirtualMethodsSliceUnchecked(pointer_size_).Contains(method_)) {
9924 holder_ = klass;
9925 }
9926 // Return false to stop searching if holder_ is not null.
9927 return holder_ == nullptr;
9928 }
9929
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009930 ObjPtr<mirror::Class> holder_ = nullptr;
Mathieu Chartier65975772016-08-05 10:46:36 -07009931 const ArtMethod* const method_;
9932 const PointerSize pointer_size_;
9933};
9934
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009935ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* method) {
Mathieu Chartier65975772016-08-05 10:46:36 -07009936 ScopedTrace trace(__FUNCTION__); // Since this function is slow, have a trace to notify people.
9937 CHECK(method->IsCopied());
9938 FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
9939 VisitClasses(&visitor);
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009940 return visitor.holder_;
Mathieu Chartier65975772016-08-05 10:46:36 -07009941}
9942
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009943ObjPtr<mirror::IfTable> ClassLinker::AllocIfTable(Thread* self, size_t ifcount) {
9944 return ObjPtr<mirror::IfTable>::DownCast(ObjPtr<mirror::ObjectArray<mirror::Object>>(
Andreas Gampec6ea7d02017-02-01 16:46:28 -08009945 mirror::IfTable::Alloc(self,
Vladimir Markob4eb1b12018-05-24 11:09:38 +01009946 GetClassRoot<mirror::ObjectArray<mirror::Object>>(this),
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009947 ifcount * mirror::IfTable::kMax)));
Andreas Gampec6ea7d02017-02-01 16:46:28 -08009948}
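// Illustrative layout of the table allocated above (assuming the usual two slots per entry,
// the interface class and its method array): for ifcount == 2 the backing object array has
// 2 * mirror::IfTable::kMax elements: [iface0, iface0 methods, iface1, iface1 methods].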
9949
Vladimir Markod1f73512020-04-02 10:50:35 +01009950bool ClassLinker::IsUpdatableBootClassPathDescriptor(const char* descriptor ATTRIBUTE_UNUSED) {
9951 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
9952 LOG(FATAL) << "UNREACHABLE";
9953 UNREACHABLE();
9954}
9955
Roland Levillain0e840272018-08-23 19:55:30 +01009956// Instantiate ClassLinker::ResolveMethod.
Vladimir Markoba118822017-06-12 15:41:56 +01009957template ArtMethod* ClassLinker::ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
Andreas Gampe42ef8ab2015-12-03 17:27:32 -08009958 uint32_t method_idx,
9959 Handle<mirror::DexCache> dex_cache,
9960 Handle<mirror::ClassLoader> class_loader,
9961 ArtMethod* referrer,
9962 InvokeType type);
Vladimir Markoba118822017-06-12 15:41:56 +01009963template ArtMethod* ClassLinker::ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
Andreas Gampe42ef8ab2015-12-03 17:27:32 -08009964 uint32_t method_idx,
9965 Handle<mirror::DexCache> dex_cache,
9966 Handle<mirror::ClassLoader> class_loader,
9967 ArtMethod* referrer,
9968 InvokeType type);
9969
Roland Levillain0e840272018-08-23 19:55:30 +01009970// Instantiate ClassLinker::AllocClass.
Andreas Gampe98ea9d92018-10-19 14:06:15 -07009971template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
Roland Levillain0e840272018-08-23 19:55:30 +01009972 Thread* self,
9973 ObjPtr<mirror::Class> java_lang_Class,
9974 uint32_t class_size);
Andreas Gampe98ea9d92018-10-19 14:06:15 -07009975template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ false>(
Roland Levillain0e840272018-08-23 19:55:30 +01009976 Thread* self,
9977 ObjPtr<mirror::Class> java_lang_Class,
9978 uint32_t class_size);
9979
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07009980} // namespace art