blob: 4d941302f989aad16a3ffce738289943dd4f4286 [file] [log] [blame]
Ian Rogers2dd0e2c2013-01-24 12:42:14 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Mathieu Chartierad2541a2013-10-25 10:05:23 -070017#include <ctime>
18
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080019#include "object.h"
20
Brian Carlstromea46f952013-07-30 01:26:50 -070021#include "art_field.h"
22#include "art_field-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080023#include "array-inl.h"
24#include "class.h"
25#include "class-inl.h"
Ian Rogers04d7aa92013-03-16 14:29:17 -070026#include "class_linker-inl.h"
Elliott Hughes956af0f2014-12-11 14:34:28 -080027#include "dex_file-inl.h"
Ian Rogers1d54e732013-05-02 21:10:01 -070028#include "gc/accounting/card_table-inl.h"
29#include "gc/heap.h"
Ian Rogers04d7aa92013-03-16 14:29:17 -070030#include "iftable-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080031#include "monitor.h"
32#include "object-inl.h"
Ian Rogers04d7aa92013-03-16 14:29:17 -070033#include "object_array-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080034#include "runtime.h"
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070035#include "handle_scope-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080036#include "throwable.h"
37#include "well_known_classes.h"
38
39namespace art {
40namespace mirror {
41
// Seed for identity hash code generation (see GenerateIdentityHashCode()).
// Mixed with the current time so identity hash codes differ across runs;
// updated atomically, so no lock is required.
Atomic<uint32_t> Object::hash_code_seed(987654321U + std::time(nullptr));
43
// Visitor used when copying an object under the read-barrier (RB) collector.
// After the raw memcpy of the whole object (which bypasses read barriers),
// this visitor re-copies every reference field individually through
// GetFieldObject() so each reference read goes through a read barrier.
class CopyReferenceFieldsWithReadBarrierVisitor {
 public:
  explicit CopyReferenceFieldsWithReadBarrierVisitor(Object* dest_obj)
      : dest_obj_(dest_obj) {}

  // Called for each instance reference field of the visited object.
  void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const
      ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
    // GetFieldObject() contains a RB.
    Object* ref = obj->GetFieldObject<Object>(offset);
    // No WB here as a large object space does not have a card table
    // coverage. Instead, cards will be marked separately.
    dest_obj_->SetFieldObjectWithoutWriteBarrier<false, false>(offset, ref);
  }

  // Called for java.lang.ref.Reference instances.
  void operator()(mirror::Class* klass, mirror::Reference* ref) const
      ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
    // Copy java.lang.ref.Reference.referent which isn't visited in
    // Object::VisitReferences().
    DCHECK(klass->IsTypeOfReferenceClass());
    this->operator()(ref, mirror::Reference::ReferentOffset(), false);
  }

  // Unused since we don't copy class native roots.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
      const {}
  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}

 private:
  // Destination object whose reference fields are being re-written with
  // read-barrier-correct values.
  Object* const dest_obj_;
};
74
// Copies the contents of |src| into the freshly allocated |dest| (num_bytes is
// the full object size including the header), then performs the read/write
// barrier bookkeeping the raw memcpy skipped. Returns |dest| (possibly updated
// by AddFinalizerReference if a GC moved it).
Object* Object::CopyObject(Thread* self, mirror::Object* dest, mirror::Object* src,
                           size_t num_bytes) {
  // Copy instance data. We assume memcpy copies by words.
  // TODO: expose and use move32.
  uint8_t* src_bytes = reinterpret_cast<uint8_t*>(src);
  uint8_t* dst_bytes = reinterpret_cast<uint8_t*>(dest);
  // Skip the object header (lock word, class pointer): dest keeps its own.
  size_t offset = sizeof(Object);
  memcpy(dst_bytes + offset, src_bytes + offset, num_bytes - offset);
  if (kUseReadBarrier) {
    // We need a RB here. After the memcpy that covers the whole
    // object above, copy references fields one by one again with a
    // RB. TODO: Optimize this later?
    CopyReferenceFieldsWithReadBarrierVisitor visitor(dest);
    src->VisitReferences(visitor, visitor);
  }
  gc::Heap* heap = Runtime::Current()->GetHeap();
  // Perform write barriers on copied object references.
  Class* c = src->GetClass();
  if (c->IsArrayClass()) {
    if (!c->GetComponentType()->IsPrimitive()) {
      // Reference array: dirty the card range covering all elements.
      ObjectArray<Object>* array = dest->AsObjectArray<Object>();
      heap->WriteBarrierArray(dest, 0, array->GetLength());
    }
  } else {
    heap->WriteBarrierEveryFieldOf(dest);
  }
  if (c->IsFinalizable()) {
    // Register the copy for finalization; may update |dest| if a GC moves it.
    heap->AddFinalizerReference(self, &dest);
  }
  return dest;
}
106
Hiroshi Yamauchi4cd662e2014-04-03 16:28:10 -0700107// An allocation pre-fence visitor that copies the object.
108class CopyObjectVisitor {
109 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100110 CopyObjectVisitor(Thread* self, Handle<Object>* orig, size_t num_bytes)
Hiroshi Yamauchi4cd662e2014-04-03 16:28:10 -0700111 : self_(self), orig_(orig), num_bytes_(num_bytes) {
112 }
113
Mathieu Chartiere401d142015-04-22 13:56:20 -0700114 void operator()(Object* obj, size_t usable_size ATTRIBUTE_UNUSED) const
Mathieu Chartier90443472015-07-16 20:32:27 -0700115 SHARED_REQUIRES(Locks::mutator_lock_) {
Hiroshi Yamauchi0fbd6e62014-07-17 16:16:31 -0700116 Object::CopyObject(self_, obj, orig_->Get(), num_bytes_);
Hiroshi Yamauchi4cd662e2014-04-03 16:28:10 -0700117 }
118
119 private:
120 Thread* const self_;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700121 Handle<Object>* const orig_;
Hiroshi Yamauchi4cd662e2014-04-03 16:28:10 -0700122 const size_t num_bytes_;
123 DISALLOW_COPY_AND_ASSIGN(CopyObjectVisitor);
124};
125
Mathieu Chartier590fee92013-09-13 13:46:47 -0700126Object* Object::Clone(Thread* self) {
127 CHECK(!IsClass()) << "Can't clone classes.";
128 // Object::SizeOf gets the right size even if we're an array. Using c->AllocObject() here would
129 // be wrong.
130 gc::Heap* heap = Runtime::Current()->GetHeap();
131 size_t num_bytes = SizeOf();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700132 StackHandleScope<1> hs(self);
133 Handle<Object> this_object(hs.NewHandle(this));
Mathieu Chartier590fee92013-09-13 13:46:47 -0700134 Object* copy;
Hiroshi Yamauchi4cd662e2014-04-03 16:28:10 -0700135 CopyObjectVisitor visitor(self, &this_object, num_bytes);
Mathieu Chartier590fee92013-09-13 13:46:47 -0700136 if (heap->IsMovableObject(this)) {
Hiroshi Yamauchi4cd662e2014-04-03 16:28:10 -0700137 copy = heap->AllocObject<true>(self, GetClass(), num_bytes, visitor);
Mathieu Chartier590fee92013-09-13 13:46:47 -0700138 } else {
Hiroshi Yamauchi4cd662e2014-04-03 16:28:10 -0700139 copy = heap->AllocNonMovableObject<true>(self, GetClass(), num_bytes, visitor);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800140 }
Mathieu Chartier0732d592013-11-06 11:02:50 -0800141 return copy;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800142}
143
Ian Rogersbbcd30b2014-10-30 15:25:36 -0700144uint32_t Object::GenerateIdentityHashCode() {
Ian Rogersbbcd30b2014-10-30 15:25:36 -0700145 uint32_t expected_value, new_value;
Mathieu Chartierad2541a2013-10-25 10:05:23 -0700146 do {
tony.ys_liu7380c312015-01-16 19:16:45 +0800147 expected_value = hash_code_seed.LoadRelaxed();
Mathieu Chartierad2541a2013-10-25 10:05:23 -0700148 new_value = expected_value * 1103515245 + 12345;
tony.ys_liu7380c312015-01-16 19:16:45 +0800149 } while (!hash_code_seed.CompareExchangeWeakRelaxed(expected_value, new_value) ||
150 (expected_value & LockWord::kHashMask) == 0);
Mathieu Chartierad2541a2013-10-25 10:05:23 -0700151 return expected_value & LockWord::kHashMask;
152}
153
// Replaces the identity-hash seed; used to make hash codes reproducible
// (e.g. for deterministic runs/tests).
void Object::SetHashCodeSeed(uint32_t new_seed) {
  hash_code_seed.StoreRelaxed(new_seed);
}
157
// Returns this object's identity hash code, generating and installing one in
// the lock word (or monitor) on first request. Loops because the lock word can
// change state concurrently; |current_this| is re-read after any operation
// that may trigger a moving GC.
int32_t Object::IdentityHashCode() const {
  mirror::Object* current_this = const_cast<mirror::Object*>(this);
  while (true) {
    LockWord lw = current_this->GetLockWord(false);
    switch (lw.GetState()) {
      case LockWord::kUnlocked: {
        // Try to compare and swap in a new hash, if we succeed we will return the hash on the next
        // loop iteration.
        LockWord hash_word = LockWord::FromHashCode(GenerateIdentityHashCode(),
                                                    lw.ReadBarrierState());
        DCHECK_EQ(hash_word.GetState(), LockWord::kHashCode);
        if (const_cast<Object*>(this)->CasLockWordWeakRelaxed(lw, hash_word)) {
          return hash_word.GetHashCode();
        }
        // CAS lost a race (or failed spuriously); re-read the lock word.
        break;
      }
      case LockWord::kThinLocked: {
        // Inflate the thin lock to a monitor and stick the hash code inside of the monitor. May
        // fail spuriously.
        Thread* self = Thread::Current();
        StackHandleScope<1> hs(self);
        Handle<mirror::Object> h_this(hs.NewHandle(current_this));
        Monitor::InflateThinLocked(self, h_this, lw, GenerateIdentityHashCode());
        // A GC may have occurred when we switched to kBlocked.
        current_this = h_this.Get();
        break;
      }
      case LockWord::kFatLocked: {
        // Already inflated, return the hash stored in the monitor.
        Monitor* monitor = lw.FatLockMonitor();
        DCHECK(monitor != nullptr);
        return monitor->GetHashCode();
      }
      case LockWord::kHashCode: {
        // Hash already installed in the lock word.
        return lw.GetHashCode();
      }
      default: {
        LOG(FATAL) << "Invalid state during hashcode " << lw.GetState();
        break;
      }
    }
  }
  UNREACHABLE();
}
202
// Debug check that storing |new_value| at |field_offset| in this object is a
// type-correct reference-field assignment. Walks the instance-field lists of
// this class and its superclasses (and static fields if |this| is a Class);
// CHECK-fails on a type mismatch and LOG(FATAL)s if no field matches.
void Object::CheckFieldAssignmentImpl(MemberOffset field_offset, Object* new_value) {
  Class* c = GetClass();
  Runtime* runtime = Runtime::Current();
  // Skip the check before the runtime is fully up or when validation is off.
  if (runtime->GetClassLinker() == nullptr || !runtime->IsStarted() ||
      !runtime->GetHeap()->IsObjectValidationEnabled() || !c->IsResolved()) {
    return;
  }
  for (Class* cur = c; cur != nullptr; cur = cur->GetSuperClass()) {
    for (ArtField& field : cur->GetIFields()) {
      // NOTE(review): a fresh StackHandleScope/Handle per field iteration looks
      // wasteful, and h_object is never read — presumably it only keeps
      // new_value alive across field.GetType(), yet new_value is still used
      // raw below; confirm no suspension point can move it here. Also note the
      // static-field loop further down creates no handle at all.
      StackHandleScope<1> hs(Thread::Current());
      Handle<Object> h_object(hs.NewHandle(new_value));
      if (field.GetOffset().Int32Value() == field_offset.Int32Value()) {
        CHECK_NE(field.GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        // TODO: resolve the field type for moving GC.
        mirror::Class* field_type = field.GetType<!kMovingCollector>();
        if (field_type != nullptr) {
          CHECK(field_type->IsAssignableFrom(new_value->GetClass()));
        }
        return;
      }
    }
  }
  if (c->IsArrayClass()) {
    // Bounds and assign-ability done in the array setter.
    return;
  }
  if (IsClass()) {
    // |this| is a Class: the offset may name one of its static fields.
    for (ArtField& field : AsClass()->GetSFields()) {
      if (field.GetOffset().Int32Value() == field_offset.Int32Value()) {
        CHECK_NE(field.GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        // TODO: resolve the field type for moving GC.
        mirror::Class* field_type = field.GetType<!kMovingCollector>();
        if (field_type != nullptr) {
          CHECK(field_type->IsAssignableFrom(new_value->GetClass()));
        }
        return;
      }
    }
  }
  LOG(FATAL) << "Failed to find field for assignment to " << reinterpret_cast<void*>(this)
             << " of type " << PrettyDescriptor(c) << " at offset " << field_offset;
  UNREACHABLE();
}
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800246
Hiroshi Yamauchieb2baaf2015-05-13 21:14:22 -0700247ArtField* Object::FindFieldByOffset(MemberOffset offset) {
248 return IsClass() ? ArtField::FindStaticFieldWithOffset(AsClass(), offset.Uint32Value())
249 : ArtField::FindInstanceFieldWithOffset(GetClass(), offset.Uint32Value());
250}
251
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800252} // namespace mirror
253} // namespace art