Upgrade V8 to version 4.9.385.28
https://chromium.googlesource.com/v8/v8/+/4.9.385.28
FPIIM-449
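
Notable changes visible in src/objects-inl.h in this roll:

- Replace the OVERRIDE macro with the C++11 override keyword.
- Remove ConstantPoolArray and the External*Array types, along with
  their inline accessors and type checkers.
- Add SIMD128 value types (Float32x4, Int32x4, Bool8x16, etc.) with
  map-based type checkers and per-lane get/set accessors.
- Add Object::IsCallable()/IsConstructor() based on map bits, replacing
  IsSpecObject()/IsSpecFunction().
- Thread LanguageMode through Object::GetProperty/GetElement/SetElement.
- Replace the MIPS-specific double field helpers with ReadDoubleValue/
  WriteDoubleValue and add typed READ_/WRITE_ field macros
  (UINT8/INT8/UINT16/INT16/FLOAT/UINT64).
- Store AllocationSite pretenuring data as raw ints instead of Smis.
- Remove JS_BUILTINS_OBJECT_TYPE; add JS_BOUND_FUNCTION_TYPE and
  JS_ITERATOR_RESULT_TYPE.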
Change-Id: I4b2e74289d4bf3667f2f3dc8aa2e541f63e26eb4
diff --git a/src/objects-inl.h b/src/objects-inl.h
index fdfadb1..0509a80 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -14,17 +14,12 @@
#include "src/base/atomicops.h"
#include "src/base/bits.h"
-#include "src/contexts.h"
+#include "src/contexts-inl.h"
#include "src/conversions-inl.h"
-#include "src/elements.h"
#include "src/factory.h"
#include "src/field-index-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
-#include "src/heap/incremental-marking.h"
-#include "src/heap/objects-visiting.h"
-#include "src/heap/spaces.h"
-#include "src/heap/store-buffer.h"
#include "src/isolate.h"
#include "src/layout-descriptor-inl.h"
#include "src/lookup.h"
@@ -33,6 +28,7 @@
#include "src/prototype.h"
#include "src/transitions-inl.h"
#include "src/type-feedback-vector-inl.h"
+#include "src/types-inl.h"
#include "src/v8memory.h"
namespace v8 {
@@ -51,14 +47,8 @@
}
-PropertyDetails PropertyDetails::AsDeleted() const {
- Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
- return PropertyDetails(smi);
-}
-
-
int PropertyDetails::field_width_in_words() const {
- DCHECK(type() == FIELD);
+ DCHECK(location() == kField);
if (!FLAG_unbox_double_fields) return 1;
if (kDoubleSize == kPointerSize) return 1;
return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
@@ -96,14 +86,6 @@
}
-// Getter that returns a tagged Smi and setter that writes a tagged Smi.
-#define ACCESSORS_TO_SMI(holder, name, offset) \
- Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
- void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
- WRITE_FIELD(this, offset, value); \
- }
-
-
// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset) \
int holder::name() const { \
@@ -148,8 +130,15 @@
bool Object::IsFixedArrayBase() const {
- return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
- IsFixedTypedArrayBase() || IsExternalArray();
+ return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
+}
+
+
+bool Object::IsFixedArray() const {
+ if (!IsHeapObject()) return false;
+ InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
+ return instance_type == FIXED_ARRAY_TYPE ||
+ instance_type == TRANSITION_ARRAY_TYPE;
}
@@ -161,24 +150,23 @@
}
-bool Object::IsAccessorInfo() const {
- return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
-}
-
-
-bool Object::IsSmi() const {
- return HAS_SMI_TAG(this);
-}
-
-
-bool Object::IsHeapObject() const {
- return Internals::HasHeapObjectTag(this);
-}
+bool Object::IsAccessorInfo() const { return IsExecutableAccessorInfo(); }
TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
+TYPE_CHECKER(Simd128Value, SIMD128_VALUE_TYPE)
+
+
+#define SIMD128_TYPE_CHECKER(TYPE, Type, type, lane_count, lane_type) \
+ bool Object::Is##Type() const { \
+ return Object::IsHeapObject() && \
+ HeapObject::cast(this)->map() == \
+ HeapObject::cast(this)->GetHeap()->type##_map(); \
+ }
+SIMD128_TYPES(SIMD128_TYPE_CHECKER)
+#undef SIMD128_TYPE_CHECKER
bool Object::IsString() const {
@@ -188,7 +176,9 @@
bool Object::IsName() const {
- return IsString() || IsSymbol();
+ STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
+ return Object::IsHeapObject() &&
+ HeapObject::cast(this)->map()->instance_type() <= LAST_NAME_TYPE;
}
@@ -197,16 +187,21 @@
}
-bool Object::IsSpecObject() const {
- return Object::IsHeapObject()
- && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
+bool Object::IsFunction() const {
+ STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
+ return Object::IsHeapObject() &&
+ HeapObject::cast(this)->map()->instance_type() >= FIRST_FUNCTION_TYPE;
}
-bool Object::IsSpecFunction() const {
- if (!Object::IsHeapObject()) return false;
- InstanceType type = HeapObject::cast(this)->map()->instance_type();
- return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
+bool Object::IsCallable() const {
+ return Object::IsHeapObject() && HeapObject::cast(this)->map()->is_callable();
+}
+
+
+bool Object::IsConstructor() const {
+ return Object::IsHeapObject() &&
+ HeapObject::cast(this)->map()->is_constructor();
}
@@ -278,8 +273,36 @@
bool Object::HasValidElements() {
// Dictionary is covered under FixedArray.
- return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
- IsFixedTypedArrayBase();
+ return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
+}
+
+
+bool Object::KeyEquals(Object* second) {
+ Object* first = this;
+ if (second->IsNumber()) {
+ if (first->IsNumber()) return first->Number() == second->Number();
+ Object* temp = first;
+ first = second;
+ second = temp;
+ }
+ if (first->IsNumber()) {
+ DCHECK_LE(0, first->Number());
+ uint32_t expected = static_cast<uint32_t>(first->Number());
+ uint32_t index;
+ return Name::cast(second)->AsArrayIndex(&index) && index == expected;
+ }
+ return Name::cast(first)->Equals(Name::cast(second));
+}
+
+
+bool Object::FilterKey(PropertyFilter filter) {
+ if (IsSymbol()) {
+ if (filter & SKIP_SYMBOLS) return true;
+ if (Symbol::cast(this)->is_private()) return true;
+ } else {
+ if (filter & SKIP_STRINGS) return true;
+ }
+ return false;
}
@@ -517,7 +540,7 @@
explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
: string_(string), hash_field_(0), seed_(seed) { }
- uint32_t Hash() OVERRIDE {
+ uint32_t Hash() override {
hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
string_.length(),
seed_);
@@ -528,7 +551,7 @@
}
- uint32_t HashForObject(Object* other) OVERRIDE {
+ uint32_t HashForObject(Object* other) override {
return String::cast(other)->Hash();
}
@@ -543,11 +566,11 @@
OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
: SequentialStringKey<uint8_t>(str, seed) { }
- bool IsMatch(Object* string) OVERRIDE {
+ bool IsMatch(Object* string) override {
return String::cast(string)->IsOneByteEqualTo(string_);
}
- Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
+ Handle<Object> AsHandle(Isolate* isolate) override;
};
@@ -558,7 +581,7 @@
DCHECK(string_->IsSeqOneByteString());
}
- uint32_t Hash() OVERRIDE {
+ uint32_t Hash() override {
DCHECK(length_ >= 0);
DCHECK(from_ + length_ <= string_->length());
const uint8_t* chars = string_->GetChars() + from_;
@@ -569,12 +592,12 @@
return result;
}
- uint32_t HashForObject(Object* other) OVERRIDE {
+ uint32_t HashForObject(Object* other) override {
return String::cast(other)->Hash();
}
- bool IsMatch(Object* string) OVERRIDE;
- Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
+ bool IsMatch(Object* string) override;
+ Handle<Object> AsHandle(Isolate* isolate) override;
private:
Handle<SeqOneByteString> string_;
@@ -589,11 +612,11 @@
explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
: SequentialStringKey<uc16>(str, seed) { }
- bool IsMatch(Object* string) OVERRIDE {
+ bool IsMatch(Object* string) override {
return String::cast(string)->IsTwoByteEqualTo(string_);
}
- Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
+ Handle<Object> AsHandle(Isolate* isolate) override;
};
@@ -603,11 +626,11 @@
explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
: string_(string), hash_field_(0), seed_(seed) { }
- bool IsMatch(Object* string) OVERRIDE {
+ bool IsMatch(Object* string) override {
return String::cast(string)->IsUtf8EqualTo(string_);
}
- uint32_t Hash() OVERRIDE {
+ uint32_t Hash() override {
if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
uint32_t result = hash_field_ >> String::kHashShift;
@@ -615,11 +638,11 @@
return result;
}
- uint32_t HashForObject(Object* other) OVERRIDE {
+ uint32_t HashForObject(Object* other) override {
return String::cast(other)->Hash();
}
- Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
+ Handle<Object> AsHandle(Isolate* isolate) override {
if (hash_field_ == 0) Hash();
return isolate->factory()->NewInternalizedStringFromUtf8(
string_, chars_, hash_field_);
@@ -638,6 +661,7 @@
TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
+TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
@@ -648,18 +672,8 @@
}
-bool Object::IsExternalArray() const {
- if (!Object::IsHeapObject())
- return false;
- InstanceType instance_type =
- HeapObject::cast(this)->map()->instance_type();
- return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
- instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
-}
-
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
- TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
@@ -685,8 +699,7 @@
bool Object::IsJSObject() const {
STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
- return IsHeapObject() &&
- HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
+ return IsHeapObject() && HeapObject::cast(this)->map()->IsJSObjectMap();
}
@@ -696,19 +709,18 @@
}
-TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
+TYPE_CHECKER(JSIteratorResult, JS_ITERATOR_RESULT_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
-TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
-TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
+TYPE_CHECKER(TransitionArray, TRANSITION_ARRAY_TYPE)
bool Object::IsJSWeakCollection() const {
@@ -721,19 +733,23 @@
}
+bool Object::IsArrayList() const { return IsFixedArray(); }
+
+
bool Object::IsLayoutDescriptor() const {
return IsSmi() || IsFixedTypedArrayBase();
}
-bool Object::IsTransitionArray() const {
- return IsFixedArray();
-}
-
-
bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }
+bool Object::IsTypeFeedbackMetadata() const { return IsFixedArray(); }
+
+
+bool Object::IsLiteralsArray() const { return IsFixedArray(); }
+
+
bool Object::IsDeoptimizationInputData() const {
// Must be a fixed array.
if (!IsFixedArray()) return false;
@@ -760,6 +776,14 @@
}
+bool Object::IsHandlerTable() const {
+ if (!IsFixedArray()) return false;
+ // There's actually no way to see the difference between a fixed array and
+ // a handler table array.
+ return true;
+}
+
+
bool Object::IsDependentCode() const {
if (!IsFixedArray()) return false;
// There's actually no way to see the difference between a fixed array and
@@ -804,6 +828,7 @@
}
+TYPE_CHECKER(JSBoundFunction, JS_BOUND_FUNCTION_TYPE)
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
@@ -881,6 +906,9 @@
}
+bool Object::IsGlobalDictionary() const { return IsDictionary(); }
+
+
bool Object::IsSeededNumberDictionary() const {
return IsDictionary();
}
@@ -896,28 +924,6 @@
}
-bool Object::IsJSFunctionResultCache() const {
- if (!IsFixedArray()) return false;
- const FixedArray* self = FixedArray::cast(this);
- int length = self->length();
- if (length < JSFunctionResultCache::kEntriesIndex) return false;
- if ((length - JSFunctionResultCache::kEntriesIndex)
- % JSFunctionResultCache::kEntrySize != 0) {
- return false;
- }
-#ifdef VERIFY_HEAP
- if (FLAG_verify_heap) {
- // TODO(svenpanne) We use const_cast here and below to break our dependency
- // cycle between the predicates and the verifiers. This can be removed when
- // the verifiers are const-correct, too.
- reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
- JSFunctionResultCacheVerify();
- }
-#endif
- return true;
-}
-
-
bool Object::IsNormalizedMapCache() const {
return NormalizedMapCache::IsNormalizedMapCache(this);
}
@@ -986,7 +992,7 @@
bool Object::IsPrimitive() const {
- return IsOddball() || IsNumber() || IsString();
+ return IsSmi() || HeapObject::cast(this)->map()->IsPrimitiveMap();
}
@@ -1000,17 +1006,7 @@
}
-bool Object::IsGlobalObject() const {
- if (!IsHeapObject()) return false;
-
- InstanceType type = HeapObject::cast(this)->map()->instance_type();
- return type == JS_GLOBAL_OBJECT_TYPE ||
- type == JS_BUILTINS_OBJECT_TYPE;
-}
-
-
TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
-TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
bool Object::IsUndetectableObject() const {
@@ -1023,7 +1019,7 @@
if (!IsHeapObject()) return false;
if (IsJSGlobalProxy()) {
const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
- GlobalObject* global = proxy->GetIsolate()->context()->global_object();
+ JSGlobalObject* global = proxy->GetIsolate()->context()->global_object();
return proxy->IsDetachedFrom(global);
}
return HeapObject::cast(this)->map()->is_access_check_needed();
@@ -1090,11 +1086,11 @@
}
-double Object::Number() {
+double Object::Number() const {
DCHECK(IsNumber());
return IsSmi()
- ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
- : reinterpret_cast<HeapNumber*>(this)->value();
+ ? static_cast<double>(reinterpret_cast<const Smi*>(this)->value())
+ : reinterpret_cast<const HeapNumber*>(this)->value();
}
@@ -1109,19 +1105,45 @@
}
-MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
- if (object->IsSmi()) return Handle<Smi>::cast(object);
- if (object->IsHeapNumber()) {
- double value = Handle<HeapNumber>::cast(object)->value();
- int int_value = FastD2I(value);
- if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
- return handle(Smi::FromInt(int_value), isolate);
- }
+Representation Object::OptimalRepresentation() {
+ if (!FLAG_track_fields) return Representation::Tagged();
+ if (IsSmi()) {
+ return Representation::Smi();
+ } else if (FLAG_track_double_fields && IsHeapNumber()) {
+ return Representation::Double();
+ } else if (FLAG_track_computed_fields && IsUninitialized()) {
+ return Representation::None();
+ } else if (FLAG_track_heap_object_fields) {
+ DCHECK(IsHeapObject());
+ return Representation::HeapObject();
+ } else {
+ return Representation::Tagged();
}
- return Handle<Smi>();
}
+ElementsKind Object::OptimalElementsKind() {
+ if (IsSmi()) return FAST_SMI_ELEMENTS;
+ if (IsNumber()) return FAST_DOUBLE_ELEMENTS;
+ return FAST_ELEMENTS;
+}
+
+
+bool Object::FitsRepresentation(Representation representation) {
+ if (FLAG_track_fields && representation.IsNone()) {
+ return false;
+ } else if (FLAG_track_fields && representation.IsSmi()) {
+ return IsSmi();
+ } else if (FLAG_track_double_fields && representation.IsDouble()) {
+ return IsMutableHeapNumber() || IsNumber();
+ } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
+ return IsHeapObject();
+ }
+ return true;
+}
+
+
+// static
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
Handle<Object> object) {
return ToObject(
@@ -1129,88 +1151,63 @@
}
+// static
+MaybeHandle<Object> Object::ToPrimitive(Handle<Object> input,
+ ToPrimitiveHint hint) {
+ if (input->IsPrimitive()) return input;
+ return JSReceiver::ToPrimitive(Handle<JSReceiver>::cast(input), hint);
+}
+
+
bool Object::HasSpecificClassOf(String* name) {
return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}
MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
- Handle<Name> name) {
+ Handle<Name> name,
+ LanguageMode language_mode) {
LookupIterator it(object, name);
- return GetProperty(&it);
+ return GetProperty(&it, language_mode);
}
-MaybeHandle<Object> Object::GetElement(Isolate* isolate,
- Handle<Object> object,
- uint32_t index) {
- // GetElement can trigger a getter which can cause allocation.
- // This was not always the case. This DCHECK is here to catch
- // leftover incorrect uses.
- DCHECK(AllowHeapAllocation::IsAllowed());
- return Object::GetElementWithReceiver(isolate, object, object, index);
+MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
+ uint32_t index,
+ LanguageMode language_mode) {
+ LookupIterator it(isolate, object, index);
+ return GetProperty(&it, language_mode);
}
-Handle<Object> Object::GetPrototypeSkipHiddenPrototypes(
- Isolate* isolate, Handle<Object> receiver) {
- PrototypeIterator iter(isolate, receiver);
- while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
- if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
- return PrototypeIterator::GetCurrent(iter);
- }
- iter.Advance();
- }
+MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
+ uint32_t index, Handle<Object> value,
+ LanguageMode language_mode) {
+ LookupIterator it(isolate, object, index);
+ MAYBE_RETURN_NULL(
+ SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED));
+ return value;
+}
+
+
+MaybeHandle<Object> Object::GetPrototype(Isolate* isolate,
+ Handle<Object> receiver) {
+ // We don't expect access checks to be needed on JSProxy objects.
+ DCHECK(!receiver->IsAccessCheckNeeded() || receiver->IsJSObject());
+ PrototypeIterator iter(isolate, receiver,
+ PrototypeIterator::START_AT_RECEIVER);
+ do {
+ if (!iter.AdvanceFollowingProxies()) return MaybeHandle<Object>();
+ } while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN));
return PrototypeIterator::GetCurrent(iter);
}
-MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
- Handle<Name> name) {
- uint32_t index;
- Isolate* isolate = name->GetIsolate();
- if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
- return GetProperty(object, name);
-}
-
-
-MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
- Handle<Object> object,
- const char* name) {
+MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
+ const char* name,
+ LanguageMode language_mode) {
Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
- DCHECK(!str.is_null());
-#ifdef DEBUG
- uint32_t index; // Assert that the name is not an array index.
- DCHECK(!str->AsArrayIndex(&index));
-#endif // DEBUG
- return GetProperty(object, str);
-}
-
-
-MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
- Handle<Object> receiver,
- uint32_t index) {
- return GetPropertyWithHandler(
- proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
-}
-
-
-MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
- Handle<JSReceiver> receiver,
- uint32_t index,
- Handle<Object> value,
- StrictMode strict_mode) {
- Isolate* isolate = proxy->GetIsolate();
- Handle<String> name = isolate->factory()->Uint32ToString(index);
- return SetPropertyWithHandler(proxy, receiver, name, value, strict_mode);
-}
-
-
-Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy,
- uint32_t index) {
- Isolate* isolate = proxy->GetIsolate();
- Handle<String> name = isolate->factory()->Uint32ToString(index);
- return HasPropertyWithHandler(proxy, name);
+ return GetProperty(object, str, language_mode);
}
@@ -1251,55 +1248,22 @@
heap->RecordWrite(object->address(), offset); \
}
-#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
- if (mode == UPDATE_WRITE_BARRIER) { \
- heap->incremental_marking()->RecordWrite( \
- object, HeapObject::RawField(object, offset), value); \
- if (heap->InNewSpace(value)) { \
- heap->RecordWrite(object->address(), offset); \
- } \
+#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
+ if (mode != SKIP_WRITE_BARRIER) { \
+ if (mode == UPDATE_WRITE_BARRIER) { \
+ heap->incremental_marking()->RecordWrite( \
+ object, HeapObject::RawField(object, offset), value); \
+ } \
+ if (heap->InNewSpace(value)) { \
+ heap->RecordWrite(object->address(), offset); \
+ } \
}
-#ifndef V8_TARGET_ARCH_MIPS
- #define READ_DOUBLE_FIELD(p, offset) \
- (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
-#else // V8_TARGET_ARCH_MIPS
- // Prevent gcc from using load-double (mips ldc1) on (possibly)
- // non-64-bit aligned HeapNumber::value.
- static inline double read_double_field(const void* p, int offset) {
- union conversion {
- double d;
- uint32_t u[2];
- } c;
- c.u[0] = (*reinterpret_cast<const uint32_t*>(
- FIELD_ADDR_CONST(p, offset)));
- c.u[1] = (*reinterpret_cast<const uint32_t*>(
- FIELD_ADDR_CONST(p, offset + 4)));
- return c.d;
- }
- #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
-#endif // V8_TARGET_ARCH_MIPS
+#define READ_DOUBLE_FIELD(p, offset) \
+ ReadDoubleValue(FIELD_ADDR_CONST(p, offset))
-#ifndef V8_TARGET_ARCH_MIPS
- #define WRITE_DOUBLE_FIELD(p, offset, value) \
- (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
-#else // V8_TARGET_ARCH_MIPS
- // Prevent gcc from using store-double (mips sdc1) on (possibly)
- // non-64-bit aligned HeapNumber::value.
- static inline void write_double_field(void* p, int offset,
- double value) {
- union conversion {
- double d;
- uint32_t u[2];
- } c;
- c.d = value;
- (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
- (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
- }
- #define WRITE_DOUBLE_FIELD(p, offset, value) \
- write_double_field(p, offset, value)
-#endif // V8_TARGET_ARCH_MIPS
-
+#define WRITE_DOUBLE_FIELD(p, offset, value) \
+ WriteDoubleValue(FIELD_ADDR(p, offset), value)
#define READ_INT_FIELD(p, offset) \
(*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))
@@ -1313,6 +1277,30 @@
#define WRITE_INTPTR_FIELD(p, offset, value) \
(*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
+#define READ_UINT8_FIELD(p, offset) \
+ (*reinterpret_cast<const uint8_t*>(FIELD_ADDR_CONST(p, offset)))
+
+#define WRITE_UINT8_FIELD(p, offset, value) \
+ (*reinterpret_cast<uint8_t*>(FIELD_ADDR(p, offset)) = value)
+
+#define READ_INT8_FIELD(p, offset) \
+ (*reinterpret_cast<const int8_t*>(FIELD_ADDR_CONST(p, offset)))
+
+#define WRITE_INT8_FIELD(p, offset, value) \
+ (*reinterpret_cast<int8_t*>(FIELD_ADDR(p, offset)) = value)
+
+#define READ_UINT16_FIELD(p, offset) \
+ (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))
+
+#define WRITE_UINT16_FIELD(p, offset, value) \
+ (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
+
+#define READ_INT16_FIELD(p, offset) \
+ (*reinterpret_cast<const int16_t*>(FIELD_ADDR_CONST(p, offset)))
+
+#define WRITE_INT16_FIELD(p, offset, value) \
+ (*reinterpret_cast<int16_t*>(FIELD_ADDR(p, offset)) = value)
+
#define READ_UINT32_FIELD(p, offset) \
(*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))
@@ -1325,18 +1313,24 @@
#define WRITE_INT32_FIELD(p, offset, value) \
(*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
+#define READ_FLOAT_FIELD(p, offset) \
+ (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))
+
+#define WRITE_FLOAT_FIELD(p, offset, value) \
+ (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)
+
+#define READ_UINT64_FIELD(p, offset) \
+ (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))
+
+#define WRITE_UINT64_FIELD(p, offset, value) \
+ (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)
+
#define READ_INT64_FIELD(p, offset) \
(*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))
#define WRITE_INT64_FIELD(p, offset, value) \
(*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
-#define READ_SHORT_FIELD(p, offset) \
- (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))
-
-#define WRITE_SHORT_FIELD(p, offset, value) \
- (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
-
#define READ_BYTE_FIELD(p, offset) \
(*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))
@@ -1357,31 +1351,6 @@
}
-int Smi::value() const {
- return Internals::SmiValue(this);
-}
-
-
-Smi* Smi::FromInt(int value) {
- DCHECK(Smi::IsValid(value));
- return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
-}
-
-
-Smi* Smi::FromIntptr(intptr_t value) {
- DCHECK(Smi::IsValid(value));
- int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
- return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
-}
-
-
-bool Smi::IsValid(intptr_t value) {
- bool result = Internals::IsValidSmi(value);
- DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
- return result;
-}
-
-
MapWord MapWord::FromMap(const Map* map) {
return MapWord(reinterpret_cast<uintptr_t>(map));
}
@@ -1505,57 +1474,11 @@
}
-HeapObject* HeapObject::FromAddress(Address address) {
- DCHECK_TAG_ALIGNED(address);
- return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
-}
-
-
-Address HeapObject::address() {
- return reinterpret_cast<Address>(this) - kHeapObjectTag;
-}
-
-
int HeapObject::Size() {
return SizeFromMap(map());
}
-bool HeapObject::MayContainRawValues() {
- InstanceType type = map()->instance_type();
- if (type <= LAST_NAME_TYPE) {
- if (type == SYMBOL_TYPE) {
- return false;
- }
- DCHECK(type < FIRST_NONSTRING_TYPE);
- // There are four string representations: sequential strings, external
- // strings, cons strings, and sliced strings.
- // Only the former two contain raw values and no heap pointers (besides the
- // map-word).
- return ((type & kIsIndirectStringMask) != kIsIndirectStringTag);
- }
- // The ConstantPoolArray contains heap pointers, but also raw values.
- if (type == CONSTANT_POOL_ARRAY_TYPE) return true;
- return (type <= LAST_DATA_TYPE);
-}
-
-
-void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
- v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
- reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
-}
-
-
-void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
- v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
-}
-
-
-void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
- v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
-}
-
-
double HeapNumber::value() const {
return READ_DOUBLE_FIELD(this, kValueOffset);
}
@@ -1577,7 +1500,106 @@
}
-ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
+bool Simd128Value::Equals(Simd128Value* that) {
+#define SIMD128_VALUE(TYPE, Type, type, lane_count, lane_type) \
+ if (this->Is##Type()) { \
+ if (!that->Is##Type()) return false; \
+ return Type::cast(this)->Equals(Type::cast(that)); \
+ }
+ SIMD128_TYPES(SIMD128_VALUE)
+#undef SIMD128_VALUE
+ return false;
+}
+
+
+// static
+bool Simd128Value::Equals(Handle<Simd128Value> one, Handle<Simd128Value> two) {
+ return one->Equals(*two);
+}
+
+
+#define SIMD128_VALUE_EQUALS(TYPE, Type, type, lane_count, lane_type) \
+ bool Type::Equals(Type* that) { \
+ for (int lane = 0; lane < lane_count; ++lane) { \
+ if (this->get_lane(lane) != that->get_lane(lane)) return false; \
+ } \
+ return true; \
+ }
+SIMD128_TYPES(SIMD128_VALUE_EQUALS)
+#undef SIMD128_VALUE_EQUALS
+
+
+#if defined(V8_TARGET_LITTLE_ENDIAN)
+#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
+ lane_type value = \
+ READ_##field_type##_FIELD(this, kValueOffset + lane * field_size);
+#elif defined(V8_TARGET_BIG_ENDIAN)
+#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
+ lane_type value = READ_##field_type##_FIELD( \
+ this, kValueOffset + (lane_count - lane - 1) * field_size);
+#else
+#error Unknown byte ordering
+#endif
+
+#if defined(V8_TARGET_LITTLE_ENDIAN)
+#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
+ WRITE_##field_type##_FIELD(this, kValueOffset + lane * field_size, value);
+#elif defined(V8_TARGET_BIG_ENDIAN)
+#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
+ WRITE_##field_type##_FIELD( \
+ this, kValueOffset + (lane_count - lane - 1) * field_size, value);
+#else
+#error Unknown byte ordering
+#endif
+
+#define SIMD128_NUMERIC_LANE_FNS(type, lane_type, lane_count, field_type, \
+ field_size) \
+ lane_type type::get_lane(int lane) const { \
+ DCHECK(lane < lane_count && lane >= 0); \
+ SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
+ return value; \
+ } \
+ \
+ void type::set_lane(int lane, lane_type value) { \
+ DCHECK(lane < lane_count && lane >= 0); \
+ SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
+ }
+
+SIMD128_NUMERIC_LANE_FNS(Float32x4, float, 4, FLOAT, kFloatSize)
+SIMD128_NUMERIC_LANE_FNS(Int32x4, int32_t, 4, INT32, kInt32Size)
+SIMD128_NUMERIC_LANE_FNS(Uint32x4, uint32_t, 4, UINT32, kInt32Size)
+SIMD128_NUMERIC_LANE_FNS(Int16x8, int16_t, 8, INT16, kShortSize)
+SIMD128_NUMERIC_LANE_FNS(Uint16x8, uint16_t, 8, UINT16, kShortSize)
+SIMD128_NUMERIC_LANE_FNS(Int8x16, int8_t, 16, INT8, kCharSize)
+SIMD128_NUMERIC_LANE_FNS(Uint8x16, uint8_t, 16, UINT8, kCharSize)
+#undef SIMD128_NUMERIC_LANE_FNS
+
+
+#define SIMD128_BOOLEAN_LANE_FNS(type, lane_type, lane_count, field_type, \
+ field_size) \
+ bool type::get_lane(int lane) const { \
+ DCHECK(lane < lane_count && lane >= 0); \
+ SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
+ DCHECK(value == 0 || value == -1); \
+ return value != 0; \
+ } \
+ \
+ void type::set_lane(int lane, bool value) { \
+ DCHECK(lane < lane_count && lane >= 0); \
+ int32_t int_val = value ? -1 : 0; \
+ SIMD128_WRITE_LANE(lane_count, field_type, field_size, int_val) \
+ }
+
+SIMD128_BOOLEAN_LANE_FNS(Bool32x4, int32_t, 4, INT32, kInt32Size)
+SIMD128_BOOLEAN_LANE_FNS(Bool16x8, int16_t, 8, INT16, kShortSize)
+SIMD128_BOOLEAN_LANE_FNS(Bool8x16, int8_t, 16, INT8, kCharSize)
+#undef SIMD128_BOOLEAN_LANE_FNS
+
+#undef SIMD128_READ_LANE
+#undef SIMD128_WRITE_LANE
+
+
+ACCESSORS(JSReceiver, properties, FixedArray, kPropertiesOffset)
Object** FixedArray::GetFirstElementAddress() {
@@ -1602,27 +1624,30 @@
}
-void JSObject::ValidateElements(Handle<JSObject> object) {
-#ifdef ENABLE_SLOW_DCHECKS
- if (FLAG_enable_slow_asserts) {
- ElementsAccessor* accessor = object->GetElementsAccessor();
- accessor->Validate(object);
- }
-#endif
-}
-
-
void AllocationSite::Initialize() {
set_transition_info(Smi::FromInt(0));
SetElementsKind(GetInitialFastElementsKind());
set_nested_site(Smi::FromInt(0));
- set_pretenure_data(Smi::FromInt(0));
- set_pretenure_create_count(Smi::FromInt(0));
+ set_pretenure_data(0);
+ set_pretenure_create_count(0);
set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
SKIP_WRITE_BARRIER);
}
+bool AllocationSite::IsZombie() { return pretenure_decision() == kZombie; }
+
+
+bool AllocationSite::IsMaybeTenure() {
+ return pretenure_decision() == kMaybeTenure;
+}
+
+
+bool AllocationSite::PretenuringDecisionMade() {
+ return pretenure_decision() != kUndecided;
+}
+
+
void AllocationSite::MarkZombie() {
DCHECK(!IsZombie());
Initialize();
@@ -1630,12 +1655,46 @@
}
+ElementsKind AllocationSite::GetElementsKind() {
+ DCHECK(!SitePointsToLiteral());
+ int value = Smi::cast(transition_info())->value();
+ return ElementsKindBits::decode(value);
+}
+
+
+void AllocationSite::SetElementsKind(ElementsKind kind) {
+ int value = Smi::cast(transition_info())->value();
+ set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
+ SKIP_WRITE_BARRIER);
+}
+
+
+bool AllocationSite::CanInlineCall() {
+ int value = Smi::cast(transition_info())->value();
+ return DoNotInlineBit::decode(value) == 0;
+}
+
+
+void AllocationSite::SetDoNotInlineCall() {
+ int value = Smi::cast(transition_info())->value();
+ set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
+ SKIP_WRITE_BARRIER);
+}
+
+
+bool AllocationSite::SitePointsToLiteral() {
+ // If transition_info is a smi, then it represents an ElementsKind
+ // for a constructed array. Otherwise, it must be a boilerplate
+ // for an object or array literal.
+ return transition_info()->IsJSArray() || transition_info()->IsJSObject();
+}
+
+
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
ElementsKind boilerplate_elements_kind) {
- if (FLAG_pretenuring_call_new ||
- IsFastSmiElementsKind(boilerplate_elements_kind)) {
+ if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
return TRACK_ALLOCATION_SITE;
}
@@ -1645,9 +1704,8 @@
AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
ElementsKind to) {
- if (FLAG_pretenuring_call_new ||
- (IsFastSmiElementsKind(from) &&
- IsMoreGeneralElementsKindTransition(from, to))) {
+ if (IsFastSmiElementsKind(from) &&
+ IsMoreGeneralElementsKindTransition(from, to)) {
return TRACK_ALLOCATION_SITE;
}
@@ -1665,25 +1723,62 @@
}
+AllocationSite::PretenureDecision AllocationSite::pretenure_decision() {
+ int value = pretenure_data();
+ return PretenureDecisionBits::decode(value);
+}
+
+
+void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
+ int value = pretenure_data();
+ set_pretenure_data(PretenureDecisionBits::update(value, decision));
+}
+
+
+bool AllocationSite::deopt_dependent_code() {
+ int value = pretenure_data();
+ return DeoptDependentCodeBit::decode(value);
+}
+
+
+void AllocationSite::set_deopt_dependent_code(bool deopt) {
+ int value = pretenure_data();
+ set_pretenure_data(DeoptDependentCodeBit::update(value, deopt));
+}
+
+
+int AllocationSite::memento_found_count() {
+ int value = pretenure_data();
+ return MementoFoundCountBits::decode(value);
+}
+
+
inline void AllocationSite::set_memento_found_count(int count) {
- int value = pretenure_data()->value();
+ int value = pretenure_data();
// Verify that we can count more mementos than we can possibly find in one
// new space collection.
DCHECK((GetHeap()->MaxSemiSpaceSize() /
- (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
+ (Heap::kMinObjectSizeInWords * kPointerSize +
AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
DCHECK(count < MementoFoundCountBits::kMax);
- set_pretenure_data(
- Smi::FromInt(MementoFoundCountBits::update(value, count)),
- SKIP_WRITE_BARRIER);
+ set_pretenure_data(MementoFoundCountBits::update(value, count));
}
-inline bool AllocationSite::IncrementMementoFoundCount() {
+
+int AllocationSite::memento_create_count() { return pretenure_create_count(); }
+
+
+void AllocationSite::set_memento_create_count(int count) {
+ set_pretenure_create_count(count);
+}
+
+
+bool AllocationSite::IncrementMementoFoundCount(int increment) {
if (IsZombie()) return false;
int value = memento_found_count();
- set_memento_found_count(value + 1);
- return memento_found_count() == kPretenureMinimumCreated;
+ set_memento_found_count(value + increment);
+ return memento_found_count() >= kPretenureMinimumCreated;
}
@@ -1737,11 +1832,12 @@
}
if (FLAG_trace_pretenuring_statistics) {
- PrintF(
- "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
- static_cast<void*>(this), create_count, found_count, ratio,
- PretenureDecisionName(current_decision),
- PretenureDecisionName(pretenure_decision()));
+ PrintIsolate(GetIsolate(),
+ "pretenuring: AllocationSite(%p): (created, found, ratio) "
+ "(%d, %d, %f) %s => %s\n",
+ this, create_count, found_count, ratio,
+ PretenureDecisionName(current_decision),
+ PretenureDecisionName(pretenure_decision()));
}
// Clear feedback calculation fields until the next gc.
@@ -1751,6 +1847,18 @@
}
+bool AllocationMemento::IsValid() {
+ return allocation_site()->IsAllocationSite() &&
+ !AllocationSite::cast(allocation_site())->IsZombie();
+}
+
+
+AllocationSite* AllocationMemento::GetAllocationSite() {
+ DCHECK(IsValid());
+ return AllocationSite::cast(allocation_site());
+}
+
+
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
JSObject::ValidateElements(object);
ElementsKind elements_kind = object->map()->elements_kind();
@@ -1861,55 +1969,25 @@
}
-void JSObject::initialize_properties() {
- DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
- WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
-}
-
-
void JSObject::initialize_elements() {
FixedArrayBase* elements = map()->GetInitialElements();
WRITE_FIELD(this, kElementsOffset, elements);
}
-Handle<String> Map::ExpectedTransitionKey(Handle<Map> map) {
- DisallowHeapAllocation no_gc;
- if (!map->HasTransitionArray()) return Handle<String>::null();
- TransitionArray* transitions = map->transitions();
- if (!transitions->IsSimpleTransition()) return Handle<String>::null();
- int transition = TransitionArray::kSimpleTransitionIndex;
- PropertyDetails details = transitions->GetTargetDetails(transition);
- Name* name = transitions->GetKey(transition);
- if (details.type() != FIELD) return Handle<String>::null();
- if (details.attributes() != NONE) return Handle<String>::null();
- if (!name->IsString()) return Handle<String>::null();
- return Handle<String>(String::cast(name));
-}
-
-
-Handle<Map> Map::ExpectedTransitionTarget(Handle<Map> map) {
- DCHECK(!ExpectedTransitionKey(map).is_null());
- return Handle<Map>(map->transitions()->GetTarget(
- TransitionArray::kSimpleTransitionIndex));
-}
-
-
-Handle<Map> Map::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
- DisallowHeapAllocation no_allocation;
- if (!map->HasTransitionArray()) return Handle<Map>::null();
- TransitionArray* transitions = map->transitions();
- int transition = transitions->Search(DATA, *key, NONE);
- if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
- PropertyDetails details = transitions->GetTargetDetails(transition);
- if (details.type() != FIELD) return Handle<Map>::null();
- DCHECK_EQ(NONE, details.attributes());
- return Handle<Map>(transitions->GetTarget(transition));
+InterceptorInfo* JSObject::GetIndexedInterceptor() {
+ DCHECK(map()->has_indexed_interceptor());
+ JSFunction* constructor = JSFunction::cast(map()->GetConstructor());
+ DCHECK(constructor->shared()->IsApiFunction());
+ Object* result =
+ constructor->shared()->get_api_func_data()->indexed_property_handler();
+ return InterceptorInfo::cast(result);
}
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
+ACCESSORS(Oddball, type_of, String, kTypeOfOffset)
byte Oddball::kind() const {
@@ -1922,26 +2000,25 @@
}
-Object* Cell::value() const {
- return READ_FIELD(this, kValueOffset);
+// static
+Handle<Object> Oddball::ToNumber(Handle<Oddball> input) {
+ return handle(input->to_number(), input->GetIsolate());
}
-void Cell::set_value(Object* val, WriteBarrierMode ignored) {
- // The write barrier is not used for global property cells.
- DCHECK(!val->IsPropertyCell() && !val->IsCell());
- WRITE_FIELD(this, kValueOffset, val);
-}
-
+ACCESSORS(Cell, value, Object, kValueOffset)
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
+ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
+ACCESSORS(PropertyCell, value, Object, kValueOffset)
-Object* PropertyCell::type_raw() const {
- return READ_FIELD(this, kTypeOffset);
+
+PropertyDetails PropertyCell::property_details() {
+ return PropertyDetails(Smi::cast(property_details_raw()));
}
-void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
- WRITE_FIELD(this, kTypeOffset, val);
+void PropertyCell::set_property_details(PropertyDetails details) {
+ set_property_details_raw(details.AsSmi());
}
@@ -1949,14 +2026,23 @@
void WeakCell::clear() {
- DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
+ // Either the garbage collector is clearing the cell or we are simply
+ // initializing the root empty weak cell.
+ DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT ||
+ this == GetHeap()->empty_weak_cell());
WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
}
void WeakCell::initialize(HeapObject* val) {
WRITE_FIELD(this, kValueOffset, val);
- WRITE_BARRIER(GetHeap(), this, kValueOffset, val);
+ Heap* heap = GetHeap();
+ // We just have to execute the generational barrier here because we never
+ // mark through a weak cell and collect evacuation candidates when we process
+ // all weak cells.
+ if (heap->InNewSpace(val)) {
+ heap->RecordWrite(address(), kValueOffset);
+ }
}
@@ -1974,8 +2060,19 @@
}
-int JSObject::GetHeaderSize() {
- InstanceType type = map()->instance_type();
+void WeakCell::clear_next(Object* the_hole_value) {
+ DCHECK_EQ(GetHeap()->the_hole_value(), the_hole_value);
+ set_next(the_hole_value, SKIP_WRITE_BARRIER);
+}
+
+
+bool WeakCell::next_cleared() { return next()->IsTheHole(); }
+
+
+int JSObject::GetHeaderSize() { return GetHeaderSize(map()->instance_type()); }
+
+
+int JSObject::GetHeaderSize(InstanceType type) {
// Check for the most common kind of JavaScript object before
// falling into the generic switch. This speeds up the internal
// field operations considerably on average.
@@ -1989,8 +2086,8 @@
return JSGlobalProxy::kSize;
case JS_GLOBAL_OBJECT_TYPE:
return JSGlobalObject::kSize;
- case JS_BUILTINS_OBJECT_TYPE:
- return JSBuiltinsObject::kSize;
+ case JS_BOUND_FUNCTION_TYPE:
+ return JSBoundFunction::kSize;
case JS_FUNCTION_TYPE:
return JSFunction::kSize;
case JS_VALUE_TYPE:
@@ -2013,10 +2110,14 @@
return JSSetIterator::kSize;
case JS_MAP_ITERATOR_TYPE:
return JSMapIterator::kSize;
+ case JS_ITERATOR_RESULT_TYPE:
+ return JSIteratorResult::kSize;
case JS_WEAK_MAP_TYPE:
return JSWeakMap::kSize;
case JS_WEAK_SET_TYPE:
return JSWeakSet::kSize;
+ case JS_PROMISE_TYPE:
+ return JSObject::kHeaderSize;
case JS_REGEXP_TYPE:
return JSRegExp::kSize;
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
@@ -2024,23 +2125,24 @@
case JS_MESSAGE_OBJECT_TYPE:
return JSMessageObject::kSize;
default:
- // TODO(jkummerow): Re-enable this. Blink currently hits this
- // from its CustomElementConstructorBuilder.
- // UNREACHABLE();
+ UNREACHABLE();
return 0;
}
}
-int JSObject::GetInternalFieldCount() {
- DCHECK(1 << kPointerSizeLog2 == kPointerSize);
- // Make sure to adjust for the number of in-object properties. These
- // properties do contribute to the size, but are not internal fields.
- return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
- map()->inobject_properties();
+int JSObject::GetInternalFieldCount(Map* map) {
+ int instance_size = map->instance_size();
+ if (instance_size == kVariableSizeSentinel) return 0;
+ InstanceType instance_type = map->instance_type();
+ return ((instance_size - GetHeaderSize(instance_type)) >> kPointerSizeLog2) -
+ map->GetInObjectProperties();
}
+int JSObject::GetInternalFieldCount() { return GetInternalFieldCount(map()); }
+
+
int JSObject::GetInternalFieldOffset(int index) {
DCHECK(index < GetInternalFieldCount() && index >= 0);
return GetHeaderSize() + (kPointerSize * index);
@@ -2135,6 +2237,31 @@
}
+void JSObject::WriteToField(int descriptor, Object* value) {
+ DisallowHeapAllocation no_gc;
+
+ DescriptorArray* desc = map()->instance_descriptors();
+ PropertyDetails details = desc->GetDetails(descriptor);
+
+ DCHECK(details.type() == DATA);
+
+ FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
+ if (details.representation().IsDouble()) {
+ // Nothing more to be done.
+ if (value->IsUninitialized()) return;
+ if (IsUnboxedDoubleField(index)) {
+ RawFastDoublePropertyAtPut(index, value->Number());
+ } else {
+ HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
+ DCHECK(box->IsMutableHeapNumber());
+ box->set_value(value->Number());
+ }
+ } else {
+ RawFastPropertyAtPut(index, value);
+ }
+}
+
+
int JSObject::GetInObjectPropertyOffset(int index) {
return map()->GetInObjectPropertyOffset(index);
}
@@ -2157,8 +2284,7 @@
}
-
-void JSObject::InitializeBody(Map* map,
+void JSObject::InitializeBody(Map* map, int start_offset,
Object* pre_allocated_value,
Object* filler_value) {
DCHECK(!filler_value->IsHeapObject() ||
@@ -2166,11 +2292,12 @@
DCHECK(!pre_allocated_value->IsHeapObject() ||
!GetHeap()->InNewSpace(pre_allocated_value));
int size = map->instance_size();
- int offset = kHeaderSize;
+ int offset = start_offset;
if (filler_value != pre_allocated_value) {
- int pre_allocated = map->pre_allocated_property_fields();
- DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
- for (int i = 0; i < pre_allocated; i++) {
+ int end_of_pre_allocated_offset =
+ size - (map->unused_property_fields() * kPointerSize);
+ DCHECK_LE(kHeaderSize, end_of_pre_allocated_offset);
+ while (offset < end_of_pre_allocated_offset) {
WRITE_FIELD(this, offset, pre_allocated_value);
offset += kPointerSize;
}
@@ -2182,18 +2309,12 @@
}
-bool JSObject::HasFastProperties() {
- DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
- return !properties()->IsDictionary();
-}
-
-
bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
if (unused_property_fields() != 0) return false;
if (is_prototype_map()) return false;
int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
- int limit = Max(minimum, inobject_properties());
- int external = NumberOfFields() - inobject_properties();
+ int limit = Max(minimum, GetInObjectProperties());
+ int external = NumberOfFields() - GetInObjectProperties();
return external > limit;
}
@@ -2206,22 +2327,11 @@
}
+bool Object::ToArrayLength(uint32_t* index) { return Object::ToUint32(index); }
+
+
bool Object::ToArrayIndex(uint32_t* index) {
- if (IsSmi()) {
- int value = Smi::cast(this)->value();
- if (value < 0) return false;
- *index = value;
- return true;
- }
- if (IsHeapNumber()) {
- double value = HeapNumber::cast(this)->value();
- uint32_t uint_value = static_cast<uint32_t>(value);
- if (value == static_cast<double>(uint_value)) {
- *index = uint_value;
- return true;
- }
- }
- return false;
+ return Object::ToUint32(index) && *index != kMaxUInt32;
}
@@ -2239,19 +2349,13 @@
void Object::VerifyApiCallResultType() {
-#if ENABLE_EXTRA_CHECKS
- if (!(IsSmi() ||
- IsString() ||
- IsSymbol() ||
- IsSpecObject() ||
- IsHeapNumber() ||
- IsUndefined() ||
- IsTrue() ||
- IsFalse() ||
- IsNull())) {
+#if DEBUG
+ if (!(IsSmi() || IsString() || IsSymbol() || IsJSReceiver() ||
+ IsHeapNumber() || IsSimd128Value() || IsUndefined() || IsTrue() ||
+ IsFalse() || IsNull())) {
FATAL("API call returned invalid object");
}
-#endif // ENABLE_EXTRA_CHECKS
+#endif // DEBUG
}
@@ -2282,7 +2386,7 @@
void FixedArray::set(int index, Object* value) {
DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
- DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
+ DCHECK(IsFixedArray());
DCHECK(index >= 0 && index < this->length());
int offset = kHeaderSize + index * kPointerSize;
WRITE_FIELD(this, offset, value);
@@ -2290,37 +2394,21 @@
}
-inline bool FixedDoubleArray::is_the_hole_nan(double value) {
- return bit_cast<uint64_t, double>(value) == kHoleNanInt64;
-}
-
-
-inline double FixedDoubleArray::hole_nan_as_double() {
- return bit_cast<double, uint64_t>(kHoleNanInt64);
-}
-
-
-inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
- DCHECK(bit_cast<uint64_t>(base::OS::nan_value()) != kHoleNanInt64);
- DCHECK((bit_cast<uint64_t>(base::OS::nan_value()) >> 32) != kHoleNanUpper32);
- return base::OS::nan_value();
-}
-
-
double FixedDoubleArray::get_scalar(int index) {
DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
map() != GetHeap()->fixed_array_map());
DCHECK(index >= 0 && index < this->length());
- double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
- DCHECK(!is_the_hole_nan(result));
- return result;
+ DCHECK(!is_the_hole(index));
+ return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
}
-int64_t FixedDoubleArray::get_representation(int index) {
+
+uint64_t FixedDoubleArray::get_representation(int index) {
DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
map() != GetHeap()->fixed_array_map());
DCHECK(index >= 0 && index < this->length());
- return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
+ int offset = kHeaderSize + index * kDoubleSize;
+ return READ_UINT64_FIELD(this, offset);
}
@@ -2338,8 +2426,12 @@
DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
map() != GetHeap()->fixed_array_map());
int offset = kHeaderSize + index * kDoubleSize;
- if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
- WRITE_DOUBLE_FIELD(this, offset, value);
+ if (std::isnan(value)) {
+ WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
+ } else {
+ WRITE_DOUBLE_FIELD(this, offset, value);
+ }
+ DCHECK(!is_the_hole(index));
}
@@ -2347,13 +2439,12 @@
DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
map() != GetHeap()->fixed_array_map());
int offset = kHeaderSize + index * kDoubleSize;
- WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
+ WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
}
bool FixedDoubleArray::is_the_hole(int index) {
- int offset = kHeaderSize + index * kDoubleSize;
- return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
+ return get_representation(index) == kHoleNanInt64;
}
@@ -2372,6 +2463,7 @@
Object* WeakFixedArray::Get(int index) const {
Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
if (raw->IsSmi()) return raw;
+ DCHECK(raw->IsWeakCell());
return WeakCell::cast(raw)->value();
}
@@ -2382,7 +2474,7 @@
}
-void WeakFixedArray::clear(int index) {
+void WeakFixedArray::Clear(int index) {
FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
}
@@ -2402,384 +2494,51 @@
}
-void ConstantPoolArray::NumberOfEntries::increment(Type type) {
- DCHECK(type < NUMBER_OF_TYPES);
- element_counts_[type]++;
-}
-
-
-int ConstantPoolArray::NumberOfEntries::equals(
- const ConstantPoolArray::NumberOfEntries& other) const {
- for (int i = 0; i < NUMBER_OF_TYPES; i++) {
- if (element_counts_[i] != other.element_counts_[i]) return false;
- }
- return true;
-}
-
-
-bool ConstantPoolArray::NumberOfEntries::is_empty() const {
- return total_count() == 0;
-}
-
-
-int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
- DCHECK(type < NUMBER_OF_TYPES);
- return element_counts_[type];
-}
-
-
-int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
- int base = 0;
- DCHECK(type < NUMBER_OF_TYPES);
- for (int i = 0; i < type; i++) {
- base += element_counts_[i];
- }
- return base;
-}
-
-
-int ConstantPoolArray::NumberOfEntries::total_count() const {
- int count = 0;
- for (int i = 0; i < NUMBER_OF_TYPES; i++) {
- count += element_counts_[i];
- }
- return count;
-}
-
-
-int ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
- for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
- if (element_counts_[i] < min || element_counts_[i] > max) {
- return false;
+template <class T>
+T* WeakFixedArray::Iterator::Next() {
+ if (list_ != NULL) {
+ // Assert that list did not change during iteration.
+ DCHECK_EQ(last_used_index_, list_->last_used_index());
+ while (index_ < list_->Length()) {
+ Object* item = list_->Get(index_++);
+ if (item != Empty()) return T::cast(item);
}
+ list_ = NULL;
}
- return true;
+ return NULL;
}
-int ConstantPoolArray::Iterator::next_index() {
- DCHECK(!is_finished());
- int ret = next_index_++;
- update_section();
- return ret;
+int ArrayList::Length() {
+ if (FixedArray::cast(this)->length() == 0) return 0;
+ return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
}
-bool ConstantPoolArray::Iterator::is_finished() {
- return next_index_ > array_->last_index(type_, final_section_);
+void ArrayList::SetLength(int length) {
+ return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
}
-void ConstantPoolArray::Iterator::update_section() {
- if (next_index_ > array_->last_index(type_, current_section_) &&
- current_section_ != final_section_) {
- DCHECK(final_section_ == EXTENDED_SECTION);
- current_section_ = EXTENDED_SECTION;
- next_index_ = array_->first_index(type_, EXTENDED_SECTION);
- }
+Object* ArrayList::Get(int index) {
+ return FixedArray::cast(this)->get(kFirstIndex + index);
}
-bool ConstantPoolArray::is_extended_layout() {
- uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
- return IsExtendedField::decode(small_layout_1);
+Object** ArrayList::Slot(int index) {
+ return data_start() + kFirstIndex + index;
}
-ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
- return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
+void ArrayList::Set(int index, Object* obj) {
+ FixedArray::cast(this)->set(kFirstIndex + index, obj);
}
-int ConstantPoolArray::first_extended_section_index() {
- DCHECK(is_extended_layout());
- uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
- return TotalCountField::decode(small_layout_2);
-}
-
-
-int ConstantPoolArray::get_extended_section_header_offset() {
- return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
-}
-
-
-ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
- uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
- return WeakObjectStateField::decode(small_layout_2);
-}
-
-
-void ConstantPoolArray::set_weak_object_state(
- ConstantPoolArray::WeakObjectState state) {
- uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
- small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
- WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
-}
-
-
-int ConstantPoolArray::first_index(Type type, LayoutSection section) {
- int index = 0;
- if (section == EXTENDED_SECTION) {
- DCHECK(is_extended_layout());
- index += first_extended_section_index();
- }
-
- for (Type type_iter = FIRST_TYPE; type_iter < type;
- type_iter = next_type(type_iter)) {
- index += number_of_entries(type_iter, section);
- }
-
- return index;
-}
-
-
-int ConstantPoolArray::last_index(Type type, LayoutSection section) {
- return first_index(type, section) + number_of_entries(type, section) - 1;
-}
-
-
-int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
- if (section == SMALL_SECTION) {
- uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
- uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
- switch (type) {
- case INT64:
- return Int64CountField::decode(small_layout_1);
- case CODE_PTR:
- return CodePtrCountField::decode(small_layout_1);
- case HEAP_PTR:
- return HeapPtrCountField::decode(small_layout_1);
- case INT32:
- return Int32CountField::decode(small_layout_2);
- default:
- UNREACHABLE();
- return 0;
- }
- } else {
- DCHECK(section == EXTENDED_SECTION && is_extended_layout());
- int offset = get_extended_section_header_offset();
- switch (type) {
- case INT64:
- offset += kExtendedInt64CountOffset;
- break;
- case CODE_PTR:
- offset += kExtendedCodePtrCountOffset;
- break;
- case HEAP_PTR:
- offset += kExtendedHeapPtrCountOffset;
- break;
- case INT32:
- offset += kExtendedInt32CountOffset;
- break;
- default:
- UNREACHABLE();
- }
- return READ_INT_FIELD(this, offset);
- }
-}
-
-
-bool ConstantPoolArray::offset_is_type(int offset, Type type) {
- return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
- offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
- (is_extended_layout() &&
- offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
- offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
-}
-
-
-ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
- LayoutSection section;
- if (is_extended_layout() && index >= first_extended_section_index()) {
- section = EXTENDED_SECTION;
- } else {
- section = SMALL_SECTION;
- }
-
- Type type = FIRST_TYPE;
- while (index > last_index(type, section)) {
- type = next_type(type);
- }
- DCHECK(type <= LAST_TYPE);
- return type;
-}
-
-
-int64_t ConstantPoolArray::get_int64_entry(int index) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT64);
- return READ_INT64_FIELD(this, OffsetOfElementAt(index));
-}
-
-
-double ConstantPoolArray::get_int64_entry_as_double(int index) {
- STATIC_ASSERT(kDoubleSize == kInt64Size);
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT64);
- return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
-}
-
-
-Address ConstantPoolArray::get_code_ptr_entry(int index) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == CODE_PTR);
- return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
-}
-
-
-Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == HEAP_PTR);
- return READ_FIELD(this, OffsetOfElementAt(index));
-}
-
-
-int32_t ConstantPoolArray::get_int32_entry(int index) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT32);
- return READ_INT32_FIELD(this, OffsetOfElementAt(index));
-}
-
-
-void ConstantPoolArray::set(int index, int64_t value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT64);
- WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
-}
-
-
-void ConstantPoolArray::set(int index, double value) {
- STATIC_ASSERT(kDoubleSize == kInt64Size);
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT64);
- WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
-}
-
-
-void ConstantPoolArray::set(int index, Address value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == CODE_PTR);
- WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
-}
-
-
-void ConstantPoolArray::set(int index, Object* value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(!GetHeap()->InNewSpace(value));
- DCHECK(get_type(index) == HEAP_PTR);
- WRITE_FIELD(this, OffsetOfElementAt(index), value);
- WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
-}
-
-
-void ConstantPoolArray::set(int index, int32_t value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT32);
- WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
-}
-
-
-void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(offset_is_type(offset, INT32));
- WRITE_INT32_FIELD(this, offset, value);
-}
-
-
-void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(offset_is_type(offset, INT64));
- WRITE_INT64_FIELD(this, offset, value);
-}
-
-
-void ConstantPoolArray::set_at_offset(int offset, double value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(offset_is_type(offset, INT64));
- WRITE_DOUBLE_FIELD(this, offset, value);
-}
-
-
-void ConstantPoolArray::set_at_offset(int offset, Address value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(offset_is_type(offset, CODE_PTR));
- WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
- WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));
-}
-
-
-void ConstantPoolArray::set_at_offset(int offset, Object* value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(!GetHeap()->InNewSpace(value));
- DCHECK(offset_is_type(offset, HEAP_PTR));
- WRITE_FIELD(this, offset, value);
- WRITE_BARRIER(GetHeap(), this, offset, value);
-}
-
-
-void ConstantPoolArray::Init(const NumberOfEntries& small) {
- uint32_t small_layout_1 =
- Int64CountField::encode(small.count_of(INT64)) |
- CodePtrCountField::encode(small.count_of(CODE_PTR)) |
- HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
- IsExtendedField::encode(false);
- uint32_t small_layout_2 =
- Int32CountField::encode(small.count_of(INT32)) |
- TotalCountField::encode(small.total_count()) |
- WeakObjectStateField::encode(NO_WEAK_OBJECTS);
- WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
- WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
- if (kHeaderSize != kFirstEntryOffset) {
- DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
- WRITE_UINT32_FIELD(this, kHeaderSize, 0); // Zero out header padding.
- }
-}
-
-
-void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
- const NumberOfEntries& extended) {
- // Initialize small layout fields first.
- Init(small);
-
- // Set is_extended_layout field.
- uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
- small_layout_1 = IsExtendedField::update(small_layout_1, true);
- WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
-
- // Initialize the extended layout fields.
- int extended_header_offset = get_extended_section_header_offset();
- WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
- extended.count_of(INT64));
- WRITE_INT32_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
- extended.count_of(CODE_PTR));
- WRITE_INT32_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
- extended.count_of(HEAP_PTR));
- WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
- extended.count_of(INT32));
-}
-
-
-int ConstantPoolArray::size() {
- NumberOfEntries small(this, SMALL_SECTION);
- if (!is_extended_layout()) {
- return SizeFor(small);
- } else {
- NumberOfEntries extended(this, EXTENDED_SECTION);
- return SizeForExtended(small, extended);
- }
-}
-
-
-int ConstantPoolArray::length() {
- uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
- int length = TotalCountField::decode(small_layout_2);
- if (is_extended_layout()) {
- length += number_of_entries(INT64, EXTENDED_SECTION) +
- number_of_entries(CODE_PTR, EXTENDED_SECTION) +
- number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
- number_of_entries(INT32, EXTENDED_SECTION);
- }
- return length;
+void ArrayList::Clear(int index, Object* undefined) {
+ DCHECK(undefined->IsUndefined());
+ FixedArray::cast(this)
+ ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
}
@@ -2792,14 +2551,16 @@
}
-bool HeapObject::NeedsToEnsureDoubleAlignment() {
-#ifndef V8_HOST_ARCH_64_BIT
- return (IsFixedFloat64Array() || IsFixedDoubleArray() ||
- IsConstantPoolArray()) &&
- FixedArrayBase::cast(this)->length() != 0;
-#else
- return false;
-#endif // V8_HOST_ARCH_64_BIT
+AllocationAlignment HeapObject::RequiredAlignment() {
+#ifdef V8_HOST_ARCH_32_BIT
+ if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
+ FixedArrayBase::cast(this)->length() != 0) {
+ return kDoubleAligned;
+ }
+ if (IsHeapNumber()) return kDoubleUnaligned;
+ if (IsSimd128Value()) return kSimd128Unaligned;
+#endif // V8_HOST_ARCH_32_BIT
+ return kWordAligned;
}
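
Note on the new RequiredAlignment(): on 32-bit hosts the heap only guarantees word (4-byte) alignment, so objects whose payload is read as 64-bit doubles or 128-bit SIMD lanes must request more. The sketch below is my reading of the three non-default modes, not text from this patch:

// Sketch (32-bit host, interpretation of the alignment modes):
//   kWordAligned     : address % 4 == 0   -- the default
//   kDoubleAligned   : address % 8 == 0   -- FixedDoubleArray/FixedFloat64Array,
//                                            whose element data must be aligned
//   kDoubleUnaligned : object start deliberately offset by one word so the
//                      HeapNumber value field (at offset 4) lands on an
//                      8-byte boundary; kSimd128Unaligned is the 16-byte analogue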
@@ -2814,20 +2575,6 @@
}
-void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
- int index,
- Object* value) {
- DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
- DCHECK(index >= 0 && index < array->length());
- int offset = kHeaderSize + index * kPointerSize;
- WRITE_FIELD(array, offset, value);
- Heap* heap = array->GetHeap();
- if (heap->InNewSpace(value)) {
- heap->RecordWrite(array->address(), offset);
- }
-}
-
-
void FixedArray::NoWriteBarrierSet(FixedArray* array,
int index,
Object* value) {
@@ -2879,6 +2626,11 @@
}
+Object** FixedArray::RawFieldOfElementAt(int index) {
+ return HeapObject::RawField(this, OffsetOfElementAt(index));
+}
+
+
bool DescriptorArray::IsEmpty() {
DCHECK(length() >= kFirstIndex ||
this == GetHeap()->empty_descriptor_array());
@@ -2886,12 +2638,75 @@
}
+int DescriptorArray::number_of_descriptors() {
+ DCHECK(length() >= kFirstIndex || IsEmpty());
+ int len = length();
+ return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
+}
+
+
+int DescriptorArray::number_of_descriptors_storage() {
+ int len = length();
+ return len == 0 ? 0 : (len - kFirstIndex) / kDescriptorSize;
+}
+
+
+int DescriptorArray::NumberOfSlackDescriptors() {
+ return number_of_descriptors_storage() - number_of_descriptors();
+}
+
+
void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
WRITE_FIELD(
this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}
+inline int DescriptorArray::number_of_entries() {
+ return number_of_descriptors();
+}
+
+
+bool DescriptorArray::HasEnumCache() {
+ return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
+}
+
+
+void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
+ set(kEnumCacheIndex, array->get(kEnumCacheIndex));
+}
+
+
+FixedArray* DescriptorArray::GetEnumCache() {
+ DCHECK(HasEnumCache());
+ FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
+ return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
+}
+
+
+bool DescriptorArray::HasEnumIndicesCache() {
+ if (IsEmpty()) return false;
+ Object* object = get(kEnumCacheIndex);
+ if (object->IsSmi()) return false;
+ FixedArray* bridge = FixedArray::cast(object);
+ return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
+}
+
+
+FixedArray* DescriptorArray::GetEnumIndicesCache() {
+ DCHECK(HasEnumIndicesCache());
+ FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
+ return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
+}
+
+
+Object** DescriptorArray::GetEnumCacheSlot() {
+ DCHECK(HasEnumCache());
+ return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
+ kEnumCacheOffset);
+}
+
+
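The enum cache slot (kEnumCacheIndex) is overloaded: a Smi there means "no cache", otherwise it holds a small bridge FixedArray whose kEnumCacheBridgeCacheIndex slot is the cached key list and whose kEnumCacheBridgeIndicesCacheIndex slot optionally holds the matching indices (again a Smi when absent). A hedged usage sketch mirroring the accessors above:

// Illustrative only: read the caches defensively, as the accessors expect.
if (descriptors->HasEnumCache()) {
  FixedArray* keys = descriptors->GetEnumCache();
  if (descriptors->HasEnumIndicesCache()) {
    FixedArray* indices = descriptors->GetEnumIndicesCache();
    // Assumption for the example only: both caches describe the same keys.
    DCHECK_EQ(keys->length(), indices->length());
  }
}
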
// Perform a binary search in a fixed array. Low and high are entry indices. If
// there are three entries in this array it should be called with low=0 and
// high=2.
@@ -2905,7 +2720,7 @@
DCHECK(low <= high);
while (low != high) {
- int mid = (low + high) / 2;
+ int mid = low + (high - low) / 2;
Name* mid_name = array->GetSortedKey(mid);
uint32_t mid_hash = mid_name->Hash();
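
The midpoint is now computed as low + (high - low) / 2 instead of (low + high) / 2. For large indices the old form can overflow the signed addition (undefined behaviour), while the new form stays inside [low, high] whenever low <= high. A standalone illustration:

#include <climits>

// Why the new midpoint form is safer: with low and high both large,
// (low + high) overflows a signed int, while low + (high - low) / 2 does not.
int SafeMid(int low, int high) {
  // int bad = (low + high) / 2;   // overflows when low + high > INT_MAX
  return low + (high - low) / 2;   // e.g. SafeMid(INT_MAX - 4, INT_MAX - 2)
                                   //      == INT_MAX - 3
}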
@@ -2960,7 +2775,7 @@
return T::kNotFound;
} else {
DCHECK(len >= valid_entries);
- DCHECK_EQ(NULL, out_insertion_index); // Not supported here.
+ DCHECK_NULL(out_insertion_index); // Not supported here.
for (int number = 0; number < valid_entries; number++) {
Name* entry = array->GetKey(number);
uint32_t current_hash = entry->Hash();
@@ -3027,22 +2842,34 @@
}
-void Map::LookupDescriptor(JSObject* holder,
- Name* name,
- LookupResult* result) {
- DescriptorArray* descriptors = this->instance_descriptors();
- int number = descriptors->SearchWithCache(name, this);
- if (number == DescriptorArray::kNotFound) return result->NotFound();
- result->DescriptorResult(holder, descriptors->GetDetails(number), number);
+int Map::LastAdded() {
+ int number_of_own_descriptors = NumberOfOwnDescriptors();
+ DCHECK(number_of_own_descriptors > 0);
+ return number_of_own_descriptors - 1;
}
-void Map::LookupTransition(JSObject* holder, Name* name,
- PropertyAttributes attributes,
- LookupResult* result) {
- int transition_index = this->SearchTransition(DATA, name, attributes);
- if (transition_index == TransitionArray::kNotFound) return result->NotFound();
- result->TransitionResult(holder, this->GetTransition(transition_index));
+int Map::NumberOfOwnDescriptors() {
+ return NumberOfOwnDescriptorsBits::decode(bit_field3());
+}
+
+
+void Map::SetNumberOfOwnDescriptors(int number) {
+ DCHECK(number <= instance_descriptors()->number_of_descriptors());
+ set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
+}
+
+
+int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }
+
+
+void Map::SetEnumLength(int length) {
+ if (length != kInvalidEnumCacheSentinel) {
+ DCHECK(length >= 0);
+ DCHECK(length == 0 || instance_descriptors()->HasEnumCache());
+ DCHECK(length <= NumberOfOwnDescriptors());
+ }
+ set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}
@@ -3051,13 +2878,9 @@
has_fast_double_elements()) {
DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
return GetHeap()->empty_fixed_array();
- } else if (has_external_array_elements()) {
- ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
- DCHECK(!GetHeap()->InNewSpace(empty_array));
- return empty_array;
} else if (has_fixed_typed_array_elements()) {
FixedTypedArrayBase* empty_array =
- GetHeap()->EmptyFixedTypedArrayForMap(this);
+ GetHeap()->EmptyFixedTypedArrayForMap(this);
DCHECK(!GetHeap()->InNewSpace(empty_array));
return empty_array;
} else {
@@ -3149,14 +2972,19 @@
int DescriptorArray::GetFieldIndex(int descriptor_number) {
- DCHECK(GetDetails(descriptor_number).type() == FIELD);
+ DCHECK(GetDetails(descriptor_number).location() == kField);
return GetDetails(descriptor_number).field_index();
}
HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
- DCHECK(GetDetails(descriptor_number).type() == FIELD);
- return HeapType::cast(GetValue(descriptor_number));
+ DCHECK(GetDetails(descriptor_number).location() == kField);
+ Object* value = GetValue(descriptor_number);
+ if (value->IsWeakCell()) {
+ if (WeakCell::cast(value)->cleared()) return HeapType::None();
+ value = WeakCell::cast(value)->value();
+ }
+ return HeapType::cast(value);
}
@@ -3166,13 +2994,13 @@
Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
- DCHECK(GetType(descriptor_number) == CALLBACKS);
+ DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
return GetValue(descriptor_number);
}
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
- DCHECK(GetType(descriptor_number) == CALLBACKS);
+ DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}
@@ -3185,20 +3013,12 @@
}
-void DescriptorArray::Set(int descriptor_number,
- Descriptor* desc,
- const WhitenessWitness&) {
+void DescriptorArray::SetDescriptor(int descriptor_number, Descriptor* desc) {
// Range check.
DCHECK(descriptor_number < number_of_descriptors());
-
- NoIncrementalWriteBarrierSet(this,
- ToKeyIndex(descriptor_number),
- *desc->GetKey());
- NoIncrementalWriteBarrierSet(this,
- ToValueIndex(descriptor_number),
- *desc->GetValue());
- NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
- desc->GetDetails().AsSmi());
+ set(ToKeyIndex(descriptor_number), *desc->GetKey());
+ set(ToValueIndex(descriptor_number), *desc->GetValue());
+ set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}
@@ -3239,49 +3059,95 @@
}
-DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
- : marking_(array->GetHeap()->incremental_marking()) {
- marking_->EnterNoMarkingScope();
- DCHECK(!marking_->IsMarking() ||
- Marking::Color(array) == Marking::WHITE_OBJECT);
+PropertyType DescriptorArray::Entry::type() { return descs_->GetType(index_); }
+
+
+Object* DescriptorArray::Entry::GetCallbackObject() {
+ return descs_->GetValue(index_);
}
-DescriptorArray::WhitenessWitness::~WhitenessWitness() {
- marking_->LeaveNoMarkingScope();
+int HashTableBase::NumberOfElements() {
+ return Smi::cast(get(kNumberOfElementsIndex))->value();
}
-template<typename Derived, typename Shape, typename Key>
-int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
- const int kMinCapacity = 32;
+int HashTableBase::NumberOfDeletedElements() {
+ return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
+}
+
+
+int HashTableBase::Capacity() {
+ return Smi::cast(get(kCapacityIndex))->value();
+}
+
+
+void HashTableBase::ElementAdded() {
+ SetNumberOfElements(NumberOfElements() + 1);
+}
+
+
+void HashTableBase::ElementRemoved() {
+ SetNumberOfElements(NumberOfElements() - 1);
+ SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
+}
+
+
+void HashTableBase::ElementsRemoved(int n) {
+ SetNumberOfElements(NumberOfElements() - n);
+ SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
+}
+
+
+// static
+int HashTableBase::ComputeCapacity(int at_least_space_for) {
+ const int kMinCapacity = 4;
int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
- if (capacity < kMinCapacity) {
- capacity = kMinCapacity; // Guarantee min capacity.
- }
- return capacity;
+ return Max(capacity, kMinCapacity);
}
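
The capacity rule is unchanged in shape (double the request, round up to a power of two) but the floor drops from 32 to 4, so small tables no longer waste 32 slots. A standalone re-derivation of the arithmetic (RoundUpToPowerOfTwo32 re-implemented here purely for the example):

#include <algorithm>
#include <cstdint>

uint32_t RoundUpToPowerOfTwo32Sketch(uint32_t v) {
  v--;
  v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
  return v + 1;
}

int ComputeCapacitySketch(int at_least_space_for) {
  const int kMinCapacity = 4;
  int capacity = static_cast<int>(RoundUpToPowerOfTwo32Sketch(
      static_cast<uint32_t>(at_least_space_for) * 2));
  return std::max(capacity, kMinCapacity);
}
// ComputeCapacitySketch(1) == 4, ComputeCapacitySketch(3) == 8,
// ComputeCapacitySketch(20) == 64 -- versus a minimum of 32 before this patch.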
-template<typename Derived, typename Shape, typename Key>
+bool HashTableBase::IsKey(Object* k) {
+ return !k->IsTheHole() && !k->IsUndefined();
+}
+
+
+void HashTableBase::SetNumberOfElements(int nof) {
+ set(kNumberOfElementsIndex, Smi::FromInt(nof));
+}
+
+
+void HashTableBase::SetNumberOfDeletedElements(int nod) {
+ set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
+}
+
+
+template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
return FindEntry(GetIsolate(), key);
}
-// Find entry for key otherwise return kNotFound.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
+ return FindEntry(isolate, key, HashTable::Hash(key));
+}
+
+
+// Find entry for key otherwise return kNotFound.
+template <typename Derived, typename Shape, typename Key>
+int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
+ int32_t hash) {
uint32_t capacity = Capacity();
- uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
+ uint32_t entry = FirstProbe(hash, capacity);
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
while (true) {
Object* element = KeyAt(entry);
// Empty entry. Uses raw unchecked accessors because it is called by the
// string table during bootstrapping.
- if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
- if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
+ if (element == isolate->heap()->root(Heap::kUndefinedValueRootIndex)) break;
+ if (element != isolate->heap()->root(Heap::kTheHoleValueRootIndex) &&
Shape::IsMatch(key, element)) return entry;
entry = NextProbe(entry, count++, capacity);
}
@@ -3296,6 +3162,7 @@
(Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}
+
uint32_t SeededNumberDictionary::max_number_key() {
DCHECK(!requires_slow_elements());
Object* max_index_object = get(kMaxNumberKeyIndex);
@@ -3304,6 +3171,7 @@
return value >> kRequiresSlowElementsTagSize;
}
+
void SeededNumberDictionary::set_requires_slow_elements() {
set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
@@ -3314,47 +3182,43 @@
CAST_ACCESSOR(AccessorInfo)
+CAST_ACCESSOR(ArrayList)
+CAST_ACCESSOR(Bool16x8)
+CAST_ACCESSOR(Bool32x4)
+CAST_ACCESSOR(Bool8x16)
CAST_ACCESSOR(ByteArray)
+CAST_ACCESSOR(BytecodeArray)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(ConsString)
-CAST_ACCESSOR(ConstantPoolArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DescriptorArray)
-CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalOneByteString)
-CAST_ACCESSOR(ExternalFloat32Array)
-CAST_ACCESSOR(ExternalFloat64Array)
-CAST_ACCESSOR(ExternalInt16Array)
-CAST_ACCESSOR(ExternalInt32Array)
-CAST_ACCESSOR(ExternalInt8Array)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalTwoByteString)
-CAST_ACCESSOR(ExternalUint16Array)
-CAST_ACCESSOR(ExternalUint32Array)
-CAST_ACCESSOR(ExternalUint8Array)
-CAST_ACCESSOR(ExternalUint8ClampedArray)
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedArrayBase)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
+CAST_ACCESSOR(Float32x4)
CAST_ACCESSOR(Foreign)
-CAST_ACCESSOR(FreeSpace)
-CAST_ACCESSOR(GlobalObject)
+CAST_ACCESSOR(GlobalDictionary)
+CAST_ACCESSOR(HandlerTable)
CAST_ACCESSOR(HeapObject)
+CAST_ACCESSOR(Int16x8)
+CAST_ACCESSOR(Int32x4)
+CAST_ACCESSOR(Int8x16)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
-CAST_ACCESSOR(JSBuiltinsObject)
+CAST_ACCESSOR(JSBoundFunction)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSDate)
CAST_ACCESSOR(JSFunction)
-CAST_ACCESSOR(JSFunctionProxy)
-CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(JSGeneratorObject)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
@@ -3368,6 +3232,7 @@
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
+CAST_ACCESSOR(JSIteratorResult)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(JSWeakMap)
@@ -3390,18 +3255,28 @@
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SharedFunctionInfo)
+CAST_ACCESSOR(Simd128Value)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(String)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
+CAST_ACCESSOR(Uint16x8)
+CAST_ACCESSOR(Uint32x4)
+CAST_ACCESSOR(Uint8x16)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakCell)
CAST_ACCESSOR(WeakFixedArray)
CAST_ACCESSOR(WeakHashTable)
+// static
+template <class Traits>
+STATIC_CONST_MEMBER_DEFINITION const InstanceType
+ FixedTypedArray<Traits>::kInstanceType;
+
+
template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
SLOW_DCHECK(object->IsHeapObject() &&
@@ -3421,6 +3296,165 @@
}
+#define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type) \
+ type* DeoptimizationInputData::name() { \
+ return type::cast(get(k##name##Index)); \
+ } \
+ void DeoptimizationInputData::Set##name(type* value) { \
+ set(k##name##Index, value); \
+ }
+
+DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
+DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
+DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
+DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi)
+DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
+DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
+DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
+DEFINE_DEOPT_ELEMENT_ACCESSORS(WeakCellCache, Object)
+
+#undef DEFINE_DEOPT_ELEMENT_ACCESSORS
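
The element-accessor macro is purely mechanical; for instance, DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi) expands to a typed getter/setter over a fixed slot:

Smi* DeoptimizationInputData::OsrAstId() {
  return Smi::cast(get(kOsrAstIdIndex));
}
void DeoptimizationInputData::SetOsrAstId(Smi* value) {
  set(kOsrAstIdIndex, value);
}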
+
+
+#define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type) \
+ type* DeoptimizationInputData::name(int i) { \
+ return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
+ } \
+ void DeoptimizationInputData::Set##name(int i, type* value) { \
+ set(IndexForEntry(i) + k##name##Offset, value); \
+ }
+
+DEFINE_DEOPT_ENTRY_ACCESSORS(AstIdRaw, Smi)
+DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
+DEFINE_DEOPT_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
+DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)
+
+#undef DEFINE_DEOPT_ENTRY_ACCESSORS
+
+
+BailoutId DeoptimizationInputData::AstId(int i) {
+ return BailoutId(AstIdRaw(i)->value());
+}
+
+
+void DeoptimizationInputData::SetAstId(int i, BailoutId value) {
+ SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
+}
+
+
+int DeoptimizationInputData::DeoptCount() {
+ return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
+}
+
+
+int DeoptimizationOutputData::DeoptPoints() { return length() / 2; }
+
+
+BailoutId DeoptimizationOutputData::AstId(int index) {
+ return BailoutId(Smi::cast(get(index * 2))->value());
+}
+
+
+void DeoptimizationOutputData::SetAstId(int index, BailoutId id) {
+ set(index * 2, Smi::FromInt(id.ToInt()));
+}
+
+
+Smi* DeoptimizationOutputData::PcAndState(int index) {
+ return Smi::cast(get(1 + index * 2));
+}
+
+
+void DeoptimizationOutputData::SetPcAndState(int index, Smi* offset) {
+ set(1 + index * 2, offset);
+}
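
DeoptimizationOutputData is a flat FixedArray of (ast id, pc-and-state) pairs, which is why DeoptPoints() above is simply length() / 2:

// Layout sketch (slot indices in the underlying FixedArray):
//   [0] AstId(0)   [1] PcAndState(0)
//   [2] AstId(1)   [3] PcAndState(1)
//   ...
// A table with n deopt points therefore has length() == 2 * n.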
+
+
+Object* LiteralsArray::get(int index) const { return FixedArray::get(index); }
+
+
+void LiteralsArray::set(int index, Object* value) {
+ FixedArray::set(index, value);
+}
+
+
+void LiteralsArray::set(int index, Smi* value) {
+ FixedArray::set(index, value);
+}
+
+
+void LiteralsArray::set(int index, Object* value, WriteBarrierMode mode) {
+ FixedArray::set(index, value, mode);
+}
+
+
+LiteralsArray* LiteralsArray::cast(Object* object) {
+ SLOW_DCHECK(object->IsLiteralsArray());
+ return reinterpret_cast<LiteralsArray*>(object);
+}
+
+
+TypeFeedbackVector* LiteralsArray::feedback_vector() const {
+ return TypeFeedbackVector::cast(get(kVectorIndex));
+}
+
+
+void LiteralsArray::set_feedback_vector(TypeFeedbackVector* vector) {
+ set(kVectorIndex, vector);
+}
+
+
+Object* LiteralsArray::literal(int literal_index) const {
+ return get(kFirstLiteralIndex + literal_index);
+}
+
+
+void LiteralsArray::set_literal(int literal_index, Object* literal) {
+ set(kFirstLiteralIndex + literal_index, literal);
+}
+
+
+int LiteralsArray::literals_count() const {
+ return length() - kFirstLiteralIndex;
+}
+
+
+void HandlerTable::SetRangeStart(int index, int value) {
+ set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
+}
+
+
+void HandlerTable::SetRangeEnd(int index, int value) {
+ set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
+}
+
+
+void HandlerTable::SetRangeHandler(int index, int offset,
+ CatchPrediction prediction) {
+ int value = HandlerOffsetField::encode(offset) |
+ HandlerPredictionField::encode(prediction);
+ set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
+}
+
+
+void HandlerTable::SetRangeDepth(int index, int value) {
+ set(index * kRangeEntrySize + kRangeDepthIndex, Smi::FromInt(value));
+}
+
+
+void HandlerTable::SetReturnOffset(int index, int value) {
+ set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
+}
+
+
+void HandlerTable::SetReturnHandler(int index, int offset,
+ CatchPrediction prediction) {
+ int value = HandlerOffsetField::encode(offset) |
+ HandlerPredictionField::encode(prediction);
+ set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
+}
+
+
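Each range entry of the new HandlerTable occupies kRangeEntrySize consecutive Smi slots (start, end, packed handler, depth), and the handler slot folds the code offset and the catch prediction into one integer via HandlerOffsetField/HandlerPredictionField. A hedged sketch of filling entry i; the loop variables and the CAUGHT prediction value are illustrative, not taken from this patch:

// Illustrative only: populate one try-range entry of a HandlerTable.
table->SetRangeStart(i, try_begin_offset);
table->SetRangeEnd(i, try_end_offset);
table->SetRangeHandler(i, handler_offset, HandlerTable::CAUGHT);
table->SetRangeDepth(i, context_depth);
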
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST
@@ -3452,6 +3486,34 @@
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
+int FreeSpace::Size() { return size(); }
+
+
+FreeSpace* FreeSpace::next() {
+ DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
+ (!GetHeap()->deserialization_complete() && map() == NULL));
+ DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
+ return reinterpret_cast<FreeSpace*>(
+ Memory::Address_at(address() + kNextOffset));
+}
+
+
+void FreeSpace::set_next(FreeSpace* next) {
+ DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
+ (!GetHeap()->deserialization_complete() && map() == NULL));
+ DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
+ base::NoBarrier_Store(
+ reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
+ reinterpret_cast<base::AtomicWord>(next));
+}
+
+
+FreeSpace* FreeSpace::cast(HeapObject* o) {
+ SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
+ return reinterpret_cast<FreeSpace*>(o);
+}
+
+
uint32_t Name::hash_field() {
return READ_UINT32_FIELD(this, kHashFieldOffset);
}
@@ -3491,9 +3553,9 @@
ACCESSORS(Symbol, name, Object, kNameOffset)
-ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
+SMI_ACCESSORS(Symbol, flags, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
-BOOL_ACCESSORS(Symbol, flags, is_own, kOwnBit)
+BOOL_ACCESSORS(Symbol, flags, is_well_known_symbol, kWellKnownSymbolBit)
bool String::Equals(String* other) {
@@ -3522,6 +3584,12 @@
}
+Handle<Name> Name::Flatten(Handle<Name> name, PretenureFlag pretenure) {
+ if (name->IsSymbol()) return name;
+ return String::Flatten(Handle<String>::cast(name));
+}
+
+
uint16_t String::Get(int index) {
DCHECK(index >= 0 && index < length());
switch (StringShape(this).full_representation_tag()) {
@@ -3680,13 +3748,13 @@
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
DCHECK(index >= 0 && index < length());
- return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
+ return READ_UINT16_FIELD(this, kHeaderSize + index * kShortSize);
}
void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
DCHECK(index >= 0 && index < length());
- WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
+ WRITE_UINT16_FIELD(this, kHeaderSize + index * kShortSize, value);
}
@@ -3905,40 +3973,7 @@
}
-void JSFunctionResultCache::MakeZeroSize() {
- set_finger_index(kEntriesIndex);
- set_size(kEntriesIndex);
-}
-
-
-void JSFunctionResultCache::Clear() {
- int cache_size = size();
- Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
- MemsetPointer(entries_start,
- GetHeap()->the_hole_value(),
- cache_size - kEntriesIndex);
- MakeZeroSize();
-}
-
-
-int JSFunctionResultCache::size() {
- return Smi::cast(get(kCacheSizeIndex))->value();
-}
-
-
-void JSFunctionResultCache::set_size(int size) {
- set(kCacheSizeIndex, Smi::FromInt(size));
-}
-
-
-int JSFunctionResultCache::finger_index() {
- return Smi::cast(get(kFingerIndex))->value();
-}
-
-
-void JSFunctionResultCache::set_finger_index(int finger_index) {
- set(kFingerIndex, Smi::FromInt(finger_index));
-}
+int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }
byte ByteArray::get(int index) {
@@ -3965,222 +4000,94 @@
}
+int ByteArray::ByteArraySize() { return SizeFor(this->length()); }
+
+
Address ByteArray::GetDataStartAddress() {
return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
-uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
- return reinterpret_cast<uint8_t*>(external_pointer());
+byte BytecodeArray::get(int index) {
+ DCHECK(index >= 0 && index < this->length());
+ return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}
-uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
- DCHECK((index >= 0) && (index < this->length()));
- uint8_t* ptr = external_uint8_clamped_pointer();
- return ptr[index];
+void BytecodeArray::set(int index, byte value) {
+ DCHECK(index >= 0 && index < this->length());
+ WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}
-Handle<Object> ExternalUint8ClampedArray::get(
- Handle<ExternalUint8ClampedArray> array,
- int index) {
- return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
- array->GetIsolate());
+void BytecodeArray::set_frame_size(int frame_size) {
+ DCHECK_GE(frame_size, 0);
+ DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
+ WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
}
-void ExternalUint8ClampedArray::set(int index, uint8_t value) {
- DCHECK((index >= 0) && (index < this->length()));
- uint8_t* ptr = external_uint8_clamped_pointer();
- ptr[index] = value;
+int BytecodeArray::frame_size() const {
+ return READ_INT_FIELD(this, kFrameSizeOffset);
}
-void* ExternalArray::external_pointer() const {
+int BytecodeArray::register_count() const {
+ return frame_size() / kPointerSize;
+}
+
+
+void BytecodeArray::set_parameter_count(int number_of_parameters) {
+ DCHECK_GE(number_of_parameters, 0);
+ // Parameter count is stored as the size on stack of the parameters to allow
+ // it to be used directly by generated code.
+ WRITE_INT_FIELD(this, kParameterSizeOffset,
+ (number_of_parameters << kPointerSizeLog2));
+}
+
+
+int BytecodeArray::parameter_count() const {
+ // Parameter count is stored as the size on stack of the parameters to allow
+ // it to be used directly by generated code.
+ return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
+}
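
As the comments above say, the parameter count is stored pre-scaled to its stack size so generated code can use it directly. Worked example on a 64-bit target, where kPointerSizeLog2 == 3:

//   set_parameter_count(3) -> writes 3 << 3 == 24 (bytes of parameter area)
//   parameter_count()      -> reads 24 >> 3 == 3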
+
+
+ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
+
+
+Address BytecodeArray::GetFirstBytecodeAddress() {
+ return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
+}
+
+
+int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
+
+
+ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)
+
+
+void* FixedTypedArrayBase::external_pointer() const {
intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
return reinterpret_cast<void*>(ptr);
}
-void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
+void FixedTypedArrayBase::set_external_pointer(void* value,
+ WriteBarrierMode mode) {
intptr_t ptr = reinterpret_cast<intptr_t>(value);
WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
-int8_t ExternalInt8Array::get_scalar(int index) {
- DCHECK((index >= 0) && (index < this->length()));
- int8_t* ptr = static_cast<int8_t*>(external_pointer());
- return ptr[index];
-}
-
-
-Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
- int index) {
- return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
- array->GetIsolate());
-}
-
-
-void ExternalInt8Array::set(int index, int8_t value) {
- DCHECK((index >= 0) && (index < this->length()));
- int8_t* ptr = static_cast<int8_t*>(external_pointer());
- ptr[index] = value;
-}
-
-
-uint8_t ExternalUint8Array::get_scalar(int index) {
- DCHECK((index >= 0) && (index < this->length()));
- uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
- return ptr[index];
-}
-
-
-Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
- int index) {
- return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
- array->GetIsolate());
-}
-
-
-void ExternalUint8Array::set(int index, uint8_t value) {
- DCHECK((index >= 0) && (index < this->length()));
- uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
- ptr[index] = value;
-}
-
-
-int16_t ExternalInt16Array::get_scalar(int index) {
- DCHECK((index >= 0) && (index < this->length()));
- int16_t* ptr = static_cast<int16_t*>(external_pointer());
- return ptr[index];
-}
-
-
-Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
- int index) {
- return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
- array->GetIsolate());
-}
-
-
-void ExternalInt16Array::set(int index, int16_t value) {
- DCHECK((index >= 0) && (index < this->length()));
- int16_t* ptr = static_cast<int16_t*>(external_pointer());
- ptr[index] = value;
-}
-
-
-uint16_t ExternalUint16Array::get_scalar(int index) {
- DCHECK((index >= 0) && (index < this->length()));
- uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
- return ptr[index];
-}
-
-
-Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
- int index) {
- return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
- array->GetIsolate());
-}
-
-
-void ExternalUint16Array::set(int index, uint16_t value) {
- DCHECK((index >= 0) && (index < this->length()));
- uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
- ptr[index] = value;
-}
-
-
-int32_t ExternalInt32Array::get_scalar(int index) {
- DCHECK((index >= 0) && (index < this->length()));
- int32_t* ptr = static_cast<int32_t*>(external_pointer());
- return ptr[index];
-}
-
-
-Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
- int index) {
- return array->GetIsolate()->factory()->
- NewNumberFromInt(array->get_scalar(index));
-}
-
-
-void ExternalInt32Array::set(int index, int32_t value) {
- DCHECK((index >= 0) && (index < this->length()));
- int32_t* ptr = static_cast<int32_t*>(external_pointer());
- ptr[index] = value;
-}
-
-
-uint32_t ExternalUint32Array::get_scalar(int index) {
- DCHECK((index >= 0) && (index < this->length()));
- uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
- return ptr[index];
-}
-
-
-Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
- int index) {
- return array->GetIsolate()->factory()->
- NewNumberFromUint(array->get_scalar(index));
-}
-
-
-void ExternalUint32Array::set(int index, uint32_t value) {
- DCHECK((index >= 0) && (index < this->length()));
- uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
- ptr[index] = value;
-}
-
-
-float ExternalFloat32Array::get_scalar(int index) {
- DCHECK((index >= 0) && (index < this->length()));
- float* ptr = static_cast<float*>(external_pointer());
- return ptr[index];
-}
-
-
-Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
- int index) {
- return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
-}
-
-
-void ExternalFloat32Array::set(int index, float value) {
- DCHECK((index >= 0) && (index < this->length()));
- float* ptr = static_cast<float*>(external_pointer());
- ptr[index] = value;
-}
-
-
-double ExternalFloat64Array::get_scalar(int index) {
- DCHECK((index >= 0) && (index < this->length()));
- double* ptr = static_cast<double*>(external_pointer());
- return ptr[index];
-}
-
-
-Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
- int index) {
- return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
-}
-
-
-void ExternalFloat64Array::set(int index, double value) {
- DCHECK((index >= 0) && (index < this->length()));
- double* ptr = static_cast<double*>(external_pointer());
- ptr[index] = value;
-}
-
-
void* FixedTypedArrayBase::DataPtr() {
- return FIELD_ADDR(this, kDataOffset);
+ return reinterpret_cast<void*>(
+ reinterpret_cast<intptr_t>(base_pointer()) +
+ reinterpret_cast<intptr_t>(external_pointer()));
}
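
DataPtr() no longer assumes the payload sits at kDataOffset; it adds base_pointer() and external_pointer(). As I read the new scheme, an on-heap typed array stores itself in base_pointer and the untagged offset of its data in external_pointer, while an off-heap array stores Smi zero in base_pointer and the raw backing-store address in external_pointer (which is also why DataSize() below returns 0 when base_pointer is Smi zero):

// Sketch of the two cases (my reading of the base/external pointer split):
//   on-heap :  base_pointer == this,  external_pointer == data offset - tag
//              => DataPtr() == address of the embedded element data
//   off-heap:  base_pointer == Smi 0, external_pointer == external buffer
//              => DataPtr() == the external backing-store address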
-int FixedTypedArrayBase::DataSize(InstanceType type) {
+int FixedTypedArrayBase::ElementSize(InstanceType type) {
int element_size;
switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
@@ -4194,7 +4101,13 @@
UNREACHABLE();
return 0;
}
- return length() * element_size;
+ return element_size;
+}
+
+
+int FixedTypedArrayBase::DataSize(InstanceType type) {
+ if (base_pointer() == Smi::FromInt(0)) return 0;
+ return length() * ElementSize(type);
}
@@ -4213,6 +4126,11 @@
}
+int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
+ return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
+}
+
+
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
@@ -4235,47 +4153,31 @@
float Float32ArrayTraits::defaultValue() {
- return static_cast<float>(base::OS::nan_value());
+ return std::numeric_limits<float>::quiet_NaN();
}
-double Float64ArrayTraits::defaultValue() { return base::OS::nan_value(); }
+double Float64ArrayTraits::defaultValue() {
+ return std::numeric_limits<double>::quiet_NaN();
+}
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
DCHECK((index >= 0) && (index < this->length()));
- ElementType* ptr = reinterpret_cast<ElementType*>(
- FIELD_ADDR(this, kDataOffset));
+ ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
return ptr[index];
}
-template<> inline
-FixedTypedArray<Float64ArrayTraits>::ElementType
- FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
- DCHECK((index >= 0) && (index < this->length()));
- return READ_DOUBLE_FIELD(this, ElementOffset(index));
-}
-
-
template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
DCHECK((index >= 0) && (index < this->length()));
- ElementType* ptr = reinterpret_cast<ElementType*>(
- FIELD_ADDR(this, kDataOffset));
+ ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
ptr[index] = value;
}
-template<> inline
-void FixedTypedArray<Float64ArrayTraits>::set(
- int index, Float64ArrayTraits::ElementType value) {
- DCHECK((index >= 0) && (index < this->length()));
- WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
-}
-
-
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
return static_cast<ElementType>(value);
@@ -4327,26 +4229,20 @@
template <class Traits>
-Handle<Object> FixedTypedArray<Traits>::SetValue(
- Handle<FixedTypedArray<Traits> > array,
- uint32_t index,
- Handle<Object> value) {
+void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
ElementType cast_value = Traits::defaultValue();
- if (index < static_cast<uint32_t>(array->length())) {
- if (value->IsSmi()) {
- int int_value = Handle<Smi>::cast(value)->value();
- cast_value = from_int(int_value);
- } else if (value->IsHeapNumber()) {
- double double_value = Handle<HeapNumber>::cast(value)->value();
- cast_value = from_double(double_value);
- } else {
- // Clamp undefined to the default value. All other types have been
- // converted to a number type further up in the call chain.
- DCHECK(value->IsUndefined());
- }
- array->set(index, cast_value);
+ if (value->IsSmi()) {
+ int int_value = Smi::cast(value)->value();
+ cast_value = from_int(int_value);
+ } else if (value->IsHeapNumber()) {
+ double double_value = HeapNumber::cast(value)->value();
+ cast_value = from_double(double_value);
+ } else {
+ // Clamp undefined to the default value. All other types have been
+ // converted to a number type further up in the call chain.
+ DCHECK(value->IsUndefined());
}
- return Traits::ToHandle(array->GetIsolate(), cast_value);
+ set(index, cast_value);
}
@@ -4413,29 +4309,55 @@
}
-int Map::inobject_properties() {
- return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
+int Map::inobject_properties_or_constructor_function_index() {
+ return READ_BYTE_FIELD(this,
+ kInObjectPropertiesOrConstructorFunctionIndexOffset);
}
-int Map::pre_allocated_property_fields() {
- return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
+void Map::set_inobject_properties_or_constructor_function_index(int value) {
+ DCHECK(0 <= value && value < 256);
+ WRITE_BYTE_FIELD(this, kInObjectPropertiesOrConstructorFunctionIndexOffset,
+ static_cast<byte>(value));
+}
+
+
+int Map::GetInObjectProperties() {
+ DCHECK(IsJSObjectMap());
+ return inobject_properties_or_constructor_function_index();
+}
+
+
+void Map::SetInObjectProperties(int value) {
+ DCHECK(IsJSObjectMap());
+ set_inobject_properties_or_constructor_function_index(value);
+}
+
+
+int Map::GetConstructorFunctionIndex() {
+ DCHECK(IsPrimitiveMap());
+ return inobject_properties_or_constructor_function_index();
+}
+
+
+void Map::SetConstructorFunctionIndex(int value) {
+ DCHECK(IsPrimitiveMap());
+ set_inobject_properties_or_constructor_function_index(value);
}
int Map::GetInObjectPropertyOffset(int index) {
// Adjust for the number of properties stored in the object.
- index -= inobject_properties();
+ index -= GetInObjectProperties();
DCHECK(index <= 0);
return instance_size() + (index * kPointerSize);
}
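
In-object properties are addressed from the end of the object: after subtracting GetInObjectProperties(), the index is zero or negative, so the returned offset is instance_size() minus a whole number of pointer slots. Worked example, assuming kPointerSize == 8:

//   instance_size() == 64, GetInObjectProperties() == 3
//   index 0 -> 64 + (0 - 3) * 8 == 40
//   index 2 -> 64 + (2 - 3) * 8 == 56   (the last slot ends at instance_size())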
-Handle<Map> Map::CopyInstallDescriptorsForTesting(
- Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
- Handle<LayoutDescriptor> layout_descriptor) {
- return CopyInstallDescriptors(map, new_descriptor, descriptors,
- layout_descriptor);
+Handle<Map> Map::AddMissingTransitionsForTesting(
+ Handle<Map> split_map, Handle<DescriptorArray> descriptors,
+ Handle<LayoutDescriptor> full_layout_descriptor) {
+ return AddMissingTransitions(split_map, descriptors, full_layout_descriptor);
}
@@ -4444,8 +4366,10 @@
if (instance_size != kVariableSizeSentinel) return instance_size;
// Only inline the most frequent cases.
InstanceType instance_type = map->instance_type();
- if (instance_type == FIXED_ARRAY_TYPE) {
- return FixedArray::BodyDescriptor::SizeOf(map, this);
+ if (instance_type == FIXED_ARRAY_TYPE ||
+ instance_type == TRANSITION_ARRAY_TYPE) {
+ return FixedArray::SizeFor(
+ reinterpret_cast<FixedArray*>(this)->synchronized_length());
}
if (instance_type == ONE_BYTE_STRING_TYPE ||
instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
@@ -4457,6 +4381,9 @@
if (instance_type == BYTE_ARRAY_TYPE) {
return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
}
+ if (instance_type == BYTECODE_ARRAY_TYPE) {
+ return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
+ }
if (instance_type == FREE_SPACE_TYPE) {
return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
}
@@ -4471,9 +4398,6 @@
return FixedDoubleArray::SizeFor(
reinterpret_cast<FixedDoubleArray*>(this)->length());
}
- if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
- return reinterpret_cast<ConstantPoolArray*>(this)->size();
- }
if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
return reinterpret_cast<FixedTypedArrayBase*>(
@@ -4493,18 +4417,7 @@
}
-void Map::set_inobject_properties(int value) {
- DCHECK(0 <= value && value < 256);
- WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
-}
-
-
-void Map::set_pre_allocated_property_fields(int value) {
- DCHECK(0 <= value && value < 256);
- WRITE_BYTE_FIELD(this,
- kPreAllocatedPropertyFieldsOffset,
- static_cast<byte>(value));
-}
+void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }
InstanceType Map::instance_type() {
@@ -4527,9 +4440,7 @@
}
-byte Map::bit_field() {
- return READ_BYTE_FIELD(this, kBitFieldOffset);
-}
+byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }
void Map::set_bit_field(byte value) {
@@ -4537,9 +4448,7 @@
}
-byte Map::bit_field2() {
- return READ_BYTE_FIELD(this, kBitField2Offset);
-}
+byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }
void Map::set_bit_field2(byte value) {
@@ -4561,13 +4470,58 @@
}
-void Map::set_function_with_prototype(bool value) {
- set_bit_field(FunctionWithPrototype::update(bit_field(), value));
+void Map::set_is_constructor() {
+ set_bit_field(bit_field() | (1 << kIsConstructor));
}
-bool Map::function_with_prototype() {
- return FunctionWithPrototype::decode(bit_field());
+bool Map::is_constructor() const {
+ return ((1 << kIsConstructor) & bit_field()) != 0;
+}
+
+
+void Map::set_is_hidden_prototype() {
+ set_bit_field3(IsHiddenPrototype::update(bit_field3(), true));
+}
+
+
+bool Map::is_hidden_prototype() const {
+ return IsHiddenPrototype::decode(bit_field3());
+}
+
+
+void Map::set_has_indexed_interceptor() {
+ set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
+}
+
+
+bool Map::has_indexed_interceptor() {
+ return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
+}
+
+
+void Map::set_is_undetectable() {
+ set_bit_field(bit_field() | (1 << kIsUndetectable));
+}
+
+
+bool Map::is_undetectable() {
+ return ((1 << kIsUndetectable) & bit_field()) != 0;
+}
+
+
+void Map::set_is_observed() { set_bit_field(bit_field() | (1 << kIsObserved)); }
+
+bool Map::is_observed() { return ((1 << kIsObserved) & bit_field()) != 0; }
+
+
+void Map::set_has_named_interceptor() {
+ set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
+}
+
+
+bool Map::has_named_interceptor() {
+ return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
}
@@ -4602,11 +4556,55 @@
set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
}
-bool Map::is_prototype_map() {
+bool Map::is_prototype_map() const {
return IsPrototypeMapBits::decode(bit_field2());
}
+void Map::set_elements_kind(ElementsKind elements_kind) {
+ DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
+ DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
+ set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
+ DCHECK(this->elements_kind() == elements_kind);
+}
+
+
+ElementsKind Map::elements_kind() {
+ return Map::ElementsKindBits::decode(bit_field2());
+}
+
+
+bool Map::has_fast_smi_elements() {
+ return IsFastSmiElementsKind(elements_kind());
+}
+
+bool Map::has_fast_object_elements() {
+ return IsFastObjectElementsKind(elements_kind());
+}
+
+bool Map::has_fast_smi_or_object_elements() {
+ return IsFastSmiOrObjectElementsKind(elements_kind());
+}
+
+bool Map::has_fast_double_elements() {
+ return IsFastDoubleElementsKind(elements_kind());
+}
+
+bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }
+
+bool Map::has_sloppy_arguments_elements() {
+ return IsSloppyArgumentsElements(elements_kind());
+}
+
+bool Map::has_fixed_typed_array_elements() {
+ return IsFixedTypedArrayElementsKind(elements_kind());
+}
+
+bool Map::has_dictionary_elements() {
+ return IsDictionaryElementsKind(elements_kind());
+}
+
+
void Map::set_dictionary_map(bool value) {
uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
new_bit_field3 = IsUnstable::update(new_bit_field3, value);
@@ -4634,13 +4632,11 @@
}
-void Map::set_has_instance_call_handler() {
- set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
-}
+void Map::set_is_callable() { set_bit_field(bit_field() | (1 << kIsCallable)); }
-bool Map::has_instance_call_handler() {
- return HasInstanceCallHandler::decode(bit_field3());
+bool Map::is_callable() const {
+ return ((1 << kIsCallable) & bit_field()) != 0;
}
@@ -4664,12 +4660,32 @@
}
-void Map::set_counter(int value) {
- set_bit_field3(Counter::update(bit_field3(), value));
+void Map::set_is_strong() {
+ set_bit_field3(IsStrong::update(bit_field3(), true));
}
-int Map::counter() { return Counter::decode(bit_field3()); }
+bool Map::is_strong() {
+ return IsStrong::decode(bit_field3());
+}
+
+
+void Map::set_new_target_is_base(bool value) {
+ set_bit_field3(NewTargetIsBase::update(bit_field3(), value));
+}
+
+
+bool Map::new_target_is_base() { return NewTargetIsBase::decode(bit_field3()); }
+
+
+void Map::set_construction_counter(int value) {
+ set_bit_field3(ConstructionCounter::update(bit_field3(), value));
+}
+
+
+int Map::construction_counter() {
+ return ConstructionCounter::decode(bit_field3());
+}
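
All of these bit_field3 accessors follow the same BitField pattern: decode() extracts a sub-range of the 32-bit word and update() rewrites only that sub-range, leaving every other flag untouched. A minimal standalone re-implementation of the idea (the real template lives in V8's utility headers; the field position and width below are made up for the example):

#include <cstdint>

// Minimal stand-in for the BitField<T, shift, size> helper.
template <typename T, int kShift, int kSize>
struct BitFieldSketch {
  static const uint32_t kMask = ((1u << kSize) - 1) << kShift;
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  static T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> kShift);
  }
  static uint32_t update(uint32_t word, T value) {
    return (word & ~kMask) | encode(value);
  }
};

// e.g. a hypothetical 10-bit counter starting at bit 0:
using CounterField = BitFieldSketch<int, 0, 10>;
// word = CounterField::update(word, 7);  // touches only bits 0..9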
void Map::mark_unstable() {
@@ -4695,7 +4711,7 @@
if (details.representation().IsSmi()) return true;
if (details.representation().IsDouble()) return true;
if (details.representation().IsHeapObject()) return true;
- if (details.type() == CONSTANT) return true;
+ if (details.type() == DATA_CONSTANT) return true;
}
return false;
}
@@ -4711,34 +4727,77 @@
}
+bool Map::CanTransition() {
+ // Only JSObject and subtypes have map transitions and back pointers.
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
+ return instance_type() >= FIRST_JS_OBJECT_TYPE;
+}
+
+
+bool Map::IsBooleanMap() { return this == GetHeap()->boolean_map(); }
+bool Map::IsPrimitiveMap() {
+ STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
+ return instance_type() <= LAST_PRIMITIVE_TYPE;
+}
+bool Map::IsJSReceiverMap() {
+ STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
+ return instance_type() >= FIRST_JS_RECEIVER_TYPE;
+}
+bool Map::IsJSObjectMap() {
+ STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
+ return instance_type() >= FIRST_JS_OBJECT_TYPE;
+}
+bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
+bool Map::IsJSFunctionMap() { return instance_type() == JS_FUNCTION_TYPE; }
+bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
+bool Map::IsJSProxyMap() { return instance_type() == JS_PROXY_TYPE; }
+bool Map::IsJSGlobalProxyMap() {
+ return instance_type() == JS_GLOBAL_PROXY_TYPE;
+}
+bool Map::IsJSGlobalObjectMap() {
+ return instance_type() == JS_GLOBAL_OBJECT_TYPE;
+}
+bool Map::IsJSTypedArrayMap() { return instance_type() == JS_TYPED_ARRAY_TYPE; }
+bool Map::IsJSDataViewMap() { return instance_type() == JS_DATA_VIEW_TYPE; }
+
+
bool Map::CanOmitMapChecks() {
return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
-int DependentCode::number_of_entries(DependencyGroup group) {
- if (length() == 0) return 0;
- return Smi::cast(get(group))->value();
+DependentCode* DependentCode::next_link() {
+ return DependentCode::cast(get(kNextLinkIndex));
}
-void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
- set(group, Smi::FromInt(value));
+void DependentCode::set_next_link(DependentCode* next) {
+ set(kNextLinkIndex, next);
}
-bool DependentCode::is_code_at(int i) {
- return get(kCodesStartIndex + i)->IsCode();
-}
+int DependentCode::flags() { return Smi::cast(get(kFlagsIndex))->value(); }
-Code* DependentCode::code_at(int i) {
- return Code::cast(get(kCodesStartIndex + i));
+
+void DependentCode::set_flags(int flags) {
+ set(kFlagsIndex, Smi::FromInt(flags));
}
-CompilationInfo* DependentCode::compilation_info_at(int i) {
- return reinterpret_cast<CompilationInfo*>(
- Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
+int DependentCode::count() { return CountField::decode(flags()); }
+
+void DependentCode::set_count(int value) {
+ set_flags(CountField::update(flags(), value));
+}
+
+
+DependentCode::DependencyGroup DependentCode::group() {
+ return static_cast<DependencyGroup>(GroupField::decode(flags()));
+}
+
+
+void DependentCode::set_group(DependentCode::DependencyGroup group) {
+ set_flags(GroupField::update(flags(), static_cast<int>(group)));
}
@@ -4752,11 +4811,6 @@
}
-Object** DependentCode::slot_at(int i) {
- return RawFieldOfElementAt(kCodesStartIndex + i);
-}
-
-
void DependentCode::clear_at(int i) {
set_undefined(kCodesStartIndex + i);
}
@@ -4767,16 +4821,6 @@
}
-void DependentCode::ExtendGroup(DependencyGroup group) {
- GroupStartIndexes starts(this);
- for (int g = kGroupCount - 1; g > group; g--) {
- if (starts.at(g) < starts.at(g + 1)) {
- copy(starts.at(g), starts.at(g + 1));
- }
- }
-}
-
-
void Code::set_flags(Code::Flags flags) {
STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
WRITE_INT_FIELD(this, kFlagsOffset, flags);
@@ -4797,6 +4841,12 @@
}
+bool Code::IsJavaScriptCode() {
+ return kind() == FUNCTION || kind() == OPTIMIZED_FUNCTION ||
+ is_interpreter_entry_trampoline();
+}
+
+
InlineCacheState Code::ic_state() {
InlineCacheState result = ExtractICStateFromFlags(flags());
// Only allow uninitialized or debugger states for non-IC code
@@ -4842,6 +4892,12 @@
}
+inline bool Code::is_interpreter_entry_trampoline() {
+ Handle<Code> interpreter_entry =
+ GetIsolate()->builtins()->InterpreterEntryTrampoline();
+ return interpreter_entry.location() != nullptr && *interpreter_entry == this;
+}
+
inline void Code::set_is_crankshafted(bool value) {
int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
int updated = IsCrankshaftedField::update(previous, value);
@@ -4850,89 +4906,75 @@
inline bool Code::is_turbofanned() {
- DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
return IsTurbofannedField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
inline void Code::set_is_turbofanned(bool value) {
- DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
int updated = IsTurbofannedField::update(previous, value);
WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
-bool Code::optimizable() {
- DCHECK_EQ(FUNCTION, kind());
- return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
+inline bool Code::can_have_weak_objects() {
+ DCHECK(kind() == OPTIMIZED_FUNCTION);
+ return CanHaveWeakObjectsField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
-void Code::set_optimizable(bool value) {
- DCHECK_EQ(FUNCTION, kind());
- WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
+inline void Code::set_can_have_weak_objects(bool value) {
+ DCHECK(kind() == OPTIMIZED_FUNCTION);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = CanHaveWeakObjectsField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
bool Code::has_deoptimization_support() {
DCHECK_EQ(FUNCTION, kind());
- byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
+ unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}
void Code::set_has_deoptimization_support(bool value) {
DCHECK_EQ(FUNCTION, kind());
- byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
+ unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
- WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
+ WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}
bool Code::has_debug_break_slots() {
DCHECK_EQ(FUNCTION, kind());
- byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
+ unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}
void Code::set_has_debug_break_slots(bool value) {
DCHECK_EQ(FUNCTION, kind());
- byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
+ unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
- WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
-}
-
-
-bool Code::is_compiled_optimizable() {
- DCHECK_EQ(FUNCTION, kind());
- byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
- return FullCodeFlagsIsCompiledOptimizable::decode(flags);
-}
-
-
-void Code::set_compiled_optimizable(bool value) {
- DCHECK_EQ(FUNCTION, kind());
- byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
- flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
- WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
+ WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}
bool Code::has_reloc_info_for_serialization() {
DCHECK_EQ(FUNCTION, kind());
- byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
+ unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
}
void Code::set_has_reloc_info_for_serialization(bool value) {
DCHECK_EQ(FUNCTION, kind());
- byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
+ unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
- WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
+ WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}
@@ -4954,14 +4996,16 @@
int Code::profiler_ticks() {
DCHECK_EQ(FUNCTION, kind());
- return READ_BYTE_FIELD(this, kProfilerTicksOffset);
+ return ProfilerTicksField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_profiler_ticks(int ticks) {
- DCHECK(ticks < 256);
if (kind() == FUNCTION) {
- WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
+ unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ unsigned updated = ProfilerTicksField::update(previous, ticks);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
}
@@ -5032,24 +5076,7 @@
}
-byte Code::to_boolean_state() {
- return extra_ic_state();
-}
-
-
-bool Code::has_function_cache() {
- DCHECK(kind() == STUB);
- return HasFunctionCacheField::decode(
- READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
-}
-
-
-void Code::set_has_function_cache(bool flag) {
- DCHECK(kind() == STUB);
- int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
- int updated = HasFunctionCacheField::update(previous, flag);
- WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
-}
+uint16_t Code::to_boolean_state() { return extra_ic_state(); }
bool Code::marked_for_deoptimization() {
@@ -5084,20 +5111,37 @@
}
-bool Code::is_debug_stub() {
- return ic_state() == DEBUG_STUB;
+bool Code::is_debug_stub() { return ic_state() == DEBUG_STUB; }
+bool Code::is_handler() { return kind() == HANDLER; }
+bool Code::is_load_stub() { return kind() == LOAD_IC; }
+bool Code::is_keyed_load_stub() { return kind() == KEYED_LOAD_IC; }
+bool Code::is_store_stub() { return kind() == STORE_IC; }
+bool Code::is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
+bool Code::is_call_stub() { return kind() == CALL_IC; }
+bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
+bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
+bool Code::is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; }
+bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
+bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
+
+
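+// True for monomorphic ICs of the kinds whose embedded maps are treated as
+// weak references.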
+bool Code::embeds_maps_weakly() {
+ Kind k = kind();
+ return (k == LOAD_IC || k == STORE_IC || k == KEYED_LOAD_IC ||
+ k == KEYED_STORE_IC || k == COMPARE_NIL_IC) &&
+ ic_state() == MONOMORPHIC;
}
-ConstantPoolArray* Code::constant_pool() {
- return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
-}
-
-
-void Code::set_constant_pool(Object* value) {
- DCHECK(value->IsConstantPoolArray());
- WRITE_FIELD(this, kConstantPoolOffset, value);
- WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
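+// Returns the address of the constant pool embedded after the Code header,
+// or NULL when embedded constant pools are disabled or not present.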
+Address Code::constant_pool() {
+ Address constant_pool = NULL;
+ if (FLAG_enable_embedded_constant_pool) {
+ int offset = constant_pool_offset();
+ if (offset < instruction_size()) {
+ constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
+ }
+ }
+ return constant_pool;
}
@@ -5182,14 +5226,31 @@
}
+bool Code::CanContainWeakObjects() {
+ return is_optimized_code() && can_have_weak_objects();
+}
+
+
+bool Code::IsWeakObject(Object* object) {
+ return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
+}
+
+
bool Code::IsWeakObjectInOptimizedCode(Object* object) {
- if (!FLAG_collect_maps) return false;
if (object->IsMap()) {
return Map::cast(object)->CanTransition() &&
FLAG_weak_embedded_maps_in_optimized_code;
}
- if (object->IsJSObject() ||
- (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
+ if (object->IsCell()) {
+ object = Cell::cast(object)->value();
+ } else if (object->IsPropertyCell()) {
+ object = PropertyCell::cast(object)->value();
+ }
+ if (object->IsJSReceiver()) {
+ return FLAG_weak_embedded_objects_in_optimized_code;
+ }
+ if (object->IsContext()) {
+ // Contexts of inlined functions are embedded in optimized code.
return FLAG_weak_embedded_objects_in_optimized_code;
}
return false;
@@ -5226,22 +5287,6 @@
}
-// If the descriptor is using the empty transition array, install a new empty
-// transition array that will have place for an element transition.
-static void EnsureHasTransitionArray(Handle<Map> map) {
- Handle<TransitionArray> transitions;
- if (!map->HasTransitionArray()) {
- transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
- transitions->set_back_pointer_storage(map->GetBackPointer());
- } else if (!map->transitions()->IsFullTransitionArray()) {
- transitions = TransitionArray::ExtendToFullTransitionArray(map);
- } else {
- return;
- }
- map->set_transitions(*transitions);
-}
-
-
LayoutDescriptor* Map::layout_descriptor_gc_safe() {
Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
return LayoutDescriptor::cast_gc_safe(layout_desc);
@@ -5261,8 +5306,16 @@
if (layout_descriptor()->IsSlowLayout()) {
set_layout_descriptor(layout_desc);
}
+#ifdef VERIFY_HEAP
+ // TODO(ishell): remove these checks from VERIFY_HEAP mode.
+ if (FLAG_verify_heap) {
+ CHECK(layout_descriptor()->IsConsistentWithMap(this));
+ CHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
+ }
+#else
SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
- DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
+ DCHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
+#endif
}
}
@@ -5275,8 +5328,15 @@
if (FLAG_unbox_double_fields) {
set_layout_descriptor(layout_desc);
+#ifdef VERIFY_HEAP
+ // TODO(ishell): remove these checks from VERIFY_HEAP mode.
+ if (FLAG_verify_heap) {
+ CHECK(layout_descriptor()->IsConsistentWithMap(this));
+ }
+#else
SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
- set_visitor_id(StaticVisitorBase::GetVisitorId(this));
+#endif
+ set_visitor_id(Heap::GetStaticVisitorIdForMap(this));
}
}
@@ -5293,7 +5353,7 @@
}
-uint32_t Map::bit_field3() {
+uint32_t Map::bit_field3() const {
return READ_UINT32_FIELD(this, kBitField3Offset);
}
@@ -5315,197 +5375,127 @@
// it should never try to (otherwise, layout descriptor must be updated too).
#ifdef DEBUG
PropertyDetails details = desc->GetDetails();
- CHECK(details.type() != FIELD || !details.representation().IsDouble());
+ CHECK(details.type() != DATA || !details.representation().IsDouble());
#endif
}
Object* Map::GetBackPointer() {
- Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
- if (object->IsTransitionArray()) {
- return TransitionArray::cast(object)->back_pointer_storage();
- } else {
- DCHECK(object->IsMap() || object->IsUndefined());
+ Object* object = constructor_or_backpointer();
+ if (object->IsMap()) {
return object;
}
+ return GetIsolate()->heap()->undefined_value();
}
-bool Map::HasElementsTransition() {
- return HasTransitionArray() && transitions()->HasElementsTransition();
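+// Looks up the target of the elements-kind transition, if any, via the
+// special elements_transition_symbol key in this map's transitions.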
+Map* Map::ElementsTransitionMap() {
+ return TransitionArray::SearchSpecial(
+ this, GetHeap()->elements_transition_symbol());
}
-bool Map::HasTransitionArray() const {
- Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
- return object->IsTransitionArray();
+ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
+
+
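+// For prototype maps the kTransitionsOrPrototypeInfoOffset slot holds
+// prototype metadata rather than transition data.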
+Object* Map::prototype_info() const {
+ DCHECK(is_prototype_map());
+ return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}
-Map* Map::elements_transition_map() {
- int index =
- transitions()->SearchSpecial(GetHeap()->elements_transition_symbol());
- return transitions()->GetTarget(index);
-}
-
-
-bool Map::CanHaveMoreTransitions() {
- if (!HasTransitionArray()) return true;
- return transitions()->number_of_transitions() <
- TransitionArray::kMaxNumberOfTransitions;
-}
-
-
-Map* Map::GetTransition(int transition_index) {
- return transitions()->GetTarget(transition_index);
-}
-
-
-int Map::SearchSpecialTransition(Symbol* name) {
- if (HasTransitionArray()) {
- return transitions()->SearchSpecial(name);
- }
- return TransitionArray::kNotFound;
-}
-
-
-int Map::SearchTransition(PropertyKind kind, Name* name,
- PropertyAttributes attributes) {
- if (HasTransitionArray()) {
- return transitions()->Search(kind, name, attributes);
- }
- return TransitionArray::kNotFound;
-}
-
-
-FixedArray* Map::GetPrototypeTransitions() {
- if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
- if (!transitions()->HasPrototypeTransitions()) {
- return GetHeap()->empty_fixed_array();
- }
- return transitions()->GetPrototypeTransitions();
-}
-
-
-void Map::SetPrototypeTransitions(
- Handle<Map> map, Handle<FixedArray> proto_transitions) {
- EnsureHasTransitionArray(map);
- int old_number_of_transitions = map->NumberOfProtoTransitions();
- if (Heap::ShouldZapGarbage() && map->HasPrototypeTransitions()) {
- DCHECK(map->GetPrototypeTransitions() != *proto_transitions);
- map->ZapPrototypeTransitions();
- }
- map->transitions()->SetPrototypeTransitions(*proto_transitions);
- map->SetNumberOfProtoTransitions(old_number_of_transitions);
-}
-
-
-bool Map::HasPrototypeTransitions() {
- return HasTransitionArray() && transitions()->HasPrototypeTransitions();
-}
-
-
-TransitionArray* Map::transitions() const {
- DCHECK(HasTransitionArray());
- Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
- return TransitionArray::cast(object);
-}
-
-
-void Map::set_transitions(TransitionArray* transition_array,
- WriteBarrierMode mode) {
- // Transition arrays are not shared. When one is replaced, it should not
- // keep referenced objects alive, so we zap it.
- // When there is another reference to the array somewhere (e.g. a handle),
- // not zapping turns from a waste of memory into a source of crashes.
- if (HasTransitionArray()) {
-#ifdef DEBUG
- for (int i = 0; i < transitions()->number_of_transitions(); i++) {
- Map* target = transitions()->GetTarget(i);
- if (target->instance_descriptors() == instance_descriptors()) {
- Name* key = transitions()->GetKey(i);
- int new_target_index;
- if (TransitionArray::IsSpecialTransition(key)) {
- new_target_index = transition_array->SearchSpecial(Symbol::cast(key));
- } else {
- PropertyDetails details =
- TransitionArray::GetTargetDetails(key, target);
- new_target_index = transition_array->Search(details.kind(), key,
- details.attributes());
- }
- DCHECK_NE(TransitionArray::kNotFound, new_target_index);
- DCHECK_EQ(target, transition_array->GetTarget(new_target_index));
- }
- }
-#endif
- DCHECK(transitions() != transition_array);
- ZapTransitions();
- }
-
- WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
+void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
+ DCHECK(is_prototype_map());
+ WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
CONDITIONAL_WRITE_BARRIER(
- GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
-}
-
-
-void Map::init_back_pointer(Object* undefined) {
- DCHECK(undefined->IsUndefined());
- WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
+ GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
}
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
- DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
- (value->IsMap() && GetBackPointer()->IsUndefined()));
- Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
- if (object->IsTransitionArray()) {
- TransitionArray::cast(object)->set_back_pointer_storage(value);
- } else {
- WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
- CONDITIONAL_WRITE_BARRIER(
- GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
- }
+ DCHECK((value->IsMap() && GetBackPointer()->IsUndefined()));
+ DCHECK(!value->IsMap() ||
+ Map::cast(value)->GetConstructor() == constructor_or_backpointer());
+ set_constructor_or_backpointer(value, mode);
}
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
-ACCESSORS(Map, constructor, Object, kConstructorOffset)
+ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
+ACCESSORS(Map, constructor_or_backpointer, Object,
+ kConstructorOrBackPointerOffset)
+
+
+Object* Map::GetConstructor() const {
+ Object* maybe_constructor = constructor_or_backpointer();
+ // Follow any back pointers.
+ while (maybe_constructor->IsMap()) {
+ maybe_constructor =
+ Map::cast(maybe_constructor)->constructor_or_backpointer();
+ }
+ return maybe_constructor;
+}
+
+
+void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
+ // Never overwrite a back pointer with a constructor.
+ DCHECK(!constructor_or_backpointer()->IsMap());
+ set_constructor_or_backpointer(constructor, mode);
+}
+
+
+Handle<Map> Map::CopyInitialMap(Handle<Map> map) {
+ return CopyInitialMap(map, map->instance_size(), map->GetInObjectProperties(),
+ map->unused_property_fields());
+}
+
+
+ACCESSORS(JSBoundFunction, length, Object, kLengthOffset)
+ACCESSORS(JSBoundFunction, name, Object, kNameOffset)
+ACCESSORS(JSBoundFunction, bound_target_function, JSReceiver,
+ kBoundTargetFunctionOffset)
+ACCESSORS(JSBoundFunction, bound_this, Object, kBoundThisOffset)
+ACCESSORS(JSBoundFunction, bound_arguments, FixedArray, kBoundArgumentsOffset)
+ACCESSORS(JSBoundFunction, creation_context, Context, kCreationContextOffset)
ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
-ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
+ACCESSORS(JSFunction, literals, LiteralsArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
-ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
-ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
-ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
+ACCESSORS(JSGlobalObject, native_context, Context, kNativeContextOffset)
+ACCESSORS(JSGlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
-ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
+SMI_ACCESSORS(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
kExpectedReceiverTypeOffset)
-ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
- kSerializedDataOffset)
-
-ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
- kDescriptorOffset)
-
ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
ACCESSORS(Box, value, Object, kValueOffset)
+ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
+SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
+ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)
+
+ACCESSORS(SloppyBlockWithEvalContextExtension, scope_info, ScopeInfo,
+ kScopeInfoOffset)
+ACCESSORS(SloppyBlockWithEvalContextExtension, extension, JSObject,
+ kExtensionOffset)
+
ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
+ACCESSORS(AccessCheckInfo, callback, Object, kCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
@@ -5518,11 +5508,14 @@
BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
kCanInterceptSymbolsBit)
BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
+BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)
ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
+ACCESSORS(CallHandlerInfo, fast_handler, Object, kFastHandlerOffset)
ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
+SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
@@ -5543,22 +5536,17 @@
kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
kAccessCheckInfoOffset)
-ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
+SMI_ACCESSORS(FunctionTemplateInfo, flag, kFlagOffset)
ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
kInternalFieldCountOffset)
-ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
-ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
-
-ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
-
ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
-ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
-ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
- kPretenureCreateCountOffset)
+SMI_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset)
+SMI_ACCESSORS(AllocationSite, pretenure_create_count,
+ kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
@@ -5566,18 +5554,18 @@
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
-ACCESSORS(Script, id, Smi, kIdOffset)
-ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
-ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
+SMI_ACCESSORS(Script, id, kIdOffset)
+SMI_ACCESSORS(Script, line_offset, kLineOffsetOffset)
+SMI_ACCESSORS(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
-ACCESSORS_TO_SMI(Script, type, kTypeOffset)
+SMI_ACCESSORS(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
-ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
- kEvalFrominstructionsOffsetOffset)
-ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
-BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
+SMI_ACCESSORS(Script, eval_from_instructions_offset,
+ kEvalFrominstructionsOffsetOffset)
+ACCESSORS(Script, shared_function_infos, Object, kSharedFunctionInfosOffset)
+SMI_ACCESSORS(Script, flags, kFlagsOffset)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
@@ -5589,6 +5577,10 @@
set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
type == COMPILATION_TYPE_EVAL));
}
+bool Script::hide_source() { return BooleanBit::get(flags(), kHideSourceBit); }
+void Script::set_hide_source(bool value) {
+ set_flags(BooleanBit::set(flags(), kHideSourceBit, value));
+}
Script::CompilationState Script::compilation_state() {
return BooleanBit::get(flags(), kCompilationStateBit) ?
COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
@@ -5597,21 +5589,29 @@
set_flags(BooleanBit::set(flags(), kCompilationStateBit,
state == COMPILATION_STATE_COMPILED));
}
+ScriptOriginOptions Script::origin_options() {
+ return ScriptOriginOptions((flags() & kOriginOptionsMask) >>
+ kOriginOptionsShift);
+}
+void Script::set_origin_options(ScriptOriginOptions origin_options) {
+ DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
+ set_flags((flags() & ~kOriginOptionsMask) |
+ (origin_options.Flags() << kOriginOptionsShift));
+}
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
-ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
-ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
+ACCESSORS(DebugInfo, code, Code, kCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
-ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
-ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
-ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
+SMI_ACCESSORS(BreakPointInfo, code_position, kCodePositionIndex)
+SMI_ACCESSORS(BreakPointInfo, source_position, kSourcePositionIndex)
+SMI_ACCESSORS(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
-ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
- kOptimizedCodeMapOffset)
+ACCESSORS(SharedFunctionInfo, optimized_code_map, FixedArray,
+ kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
kFeedbackVectorOffset)
@@ -5638,6 +5638,9 @@
kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
kDoNotCacheBit)
+BOOL_ACCESSORS(FunctionTemplateInfo, flag, instantiated, kInstantiatedBit)
+BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
+ kAcceptAnyReceiver)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
@@ -5659,11 +5662,13 @@
kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, never_compiled,
+ kNeverCompiled)
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
-SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
+SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
kExpectedNofPropertiesOffset)
@@ -5711,8 +5716,7 @@
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
-PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
- formal_parameter_count,
+PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
kFormalParameterCountOffset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
@@ -5757,25 +5761,25 @@
set_compiler_hints(BooleanBit::set(compiler_hints(),
kOptimizationDisabled,
disable));
- // If disabling optimizations we reflect that in the code object so
- // it will not be counted as optimizable code.
- if ((code()->kind() == Code::FUNCTION) && disable) {
- code()->set_optimizable(false);
- }
}
-StrictMode SharedFunctionInfo::strict_mode() {
- return BooleanBit::get(compiler_hints(), kStrictModeFunction)
- ? STRICT : SLOPPY;
+LanguageMode SharedFunctionInfo::language_mode() {
+ STATIC_ASSERT(LANGUAGE_END == 3);
+ return construct_language_mode(
+ BooleanBit::get(compiler_hints(), kStrictModeFunction),
+ BooleanBit::get(compiler_hints(), kStrongModeFunction));
}
-void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
- // We only allow mode transitions from sloppy to strict.
- DCHECK(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
+void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
+ STATIC_ASSERT(LANGUAGE_END == 3);
+ // We only allow language mode transitions that set the same language mode
+ // again or go up in the chain:
+ DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
int hints = compiler_hints();
- hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
+ hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
+ hints = BooleanBit::set(hints, kStrongModeFunction, is_strong(language_mode));
set_compiler_hints(hints);
}
@@ -5793,31 +5797,29 @@
}
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, uses_super_property,
- kUsesSuperProperty)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, uses_super_constructor_call,
- kUsesSuperConstructorCall)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
+ kNeedsHomeObject)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
- kInlineBuiltin)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
name_should_print_as_anonymous,
kNameShouldPrintAsAnonymous)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
+ kDontCrankshaft)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
kIsConciseMethod)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_accessor_function,
+ kIsAccessorFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
kIsDefaultConstructor)
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
-ACCESSORS(CodeCache, weak_cell_cache, Object, kWeakCellCacheOffset)
ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
@@ -5837,7 +5839,7 @@
void SharedFunctionInfo::DontAdaptArguments() {
DCHECK(code()->kind() == Code::BUILTIN);
- set_formal_parameter_count(kDontAdaptArgumentsSentinel);
+ set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
}
@@ -5873,8 +5875,13 @@
}
DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
+#ifdef DEBUG
+ Code::VerifyRecompiledCode(code(), value);
+#endif // DEBUG
set_code(value);
+
+ if (is_compiled()) set_never_compiled(false);
}
@@ -5895,7 +5902,33 @@
bool SharedFunctionInfo::is_compiled() {
- return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
+ Builtins* builtins = GetIsolate()->builtins();
+ DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
+ DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
+ return code() != builtins->builtin(Builtins::kCompileLazy);
+}
+
+
+bool SharedFunctionInfo::has_simple_parameters() {
+ return scope_info()->HasSimpleParameters();
+}
+
+
+bool SharedFunctionInfo::HasDebugInfo() {
+ bool has_debug_info = debug_info()->IsStruct();
+ DCHECK(!has_debug_info || HasDebugCode());
+ return has_debug_info;
+}
+
+
+DebugInfo* SharedFunctionInfo::GetDebugInfo() {
+ DCHECK(HasDebugInfo());
+ return DebugInfo::cast(debug_info());
+}
+
+
+bool SharedFunctionInfo::HasDebugCode() {
+ return code()->kind() == Code::FUNCTION && code()->has_debug_break_slots();
}
@@ -5921,6 +5954,17 @@
}
+bool SharedFunctionInfo::HasBytecodeArray() {
+ return function_data()->IsBytecodeArray();
+}
+
+
+BytecodeArray* SharedFunctionInfo::bytecode_array() {
+ DCHECK(HasBytecodeArray());
+ return BytecodeArray::cast(function_data());
+}
+
+
int SharedFunctionInfo::ic_age() {
return ICAgeBits::decode(counters());
}
@@ -5991,35 +6035,50 @@
set_optimization_disabled(false);
set_opt_count(0);
set_deopt_count(0);
- code()->set_optimizable(true);
}
}
-bool JSFunction::IsBuiltin() {
- return context()->global_object()->IsJSBuiltinsObject();
+void SharedFunctionInfo::set_disable_optimization_reason(BailoutReason reason) {
+ set_opt_count_and_bailout_reason(DisabledOptimizationReasonBits::update(
+ opt_count_and_bailout_reason(), reason));
}
-bool JSFunction::IsFromNativeScript() {
- Object* script = shared()->script();
- bool native = script->IsScript() &&
- Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
- DCHECK(!IsBuiltin() || native); // All builtins are also native.
- return native;
+bool SharedFunctionInfo::IsBuiltin() {
+ Object* script_obj = script();
+ if (script_obj->IsUndefined()) return true;
+ Script* script = Script::cast(script_obj);
+ Script::Type type = static_cast<Script::Type>(script->type());
+ return type != Script::TYPE_NORMAL;
}
-bool JSFunction::IsFromExtensionScript() {
- Object* script = shared()->script();
- return script->IsScript() &&
- Script::cast(script)->type()->value() == Script::TYPE_EXTENSION;
+bool SharedFunctionInfo::IsSubjectToDebugging() { return !IsBuiltin(); }
+
+
+bool SharedFunctionInfo::OptimizedCodeMapIsCleared() const {
+ return optimized_code_map() == GetHeap()->cleared_optimized_code_map();
}
-bool JSFunction::NeedsArgumentsAdaption() {
- return shared()->formal_parameter_count() !=
- SharedFunctionInfo::kDontAdaptArgumentsSentinel;
+// static
+void SharedFunctionInfo::AddToOptimizedCodeMap(
+ Handle<SharedFunctionInfo> shared, Handle<Context> native_context,
+ Handle<Code> code, Handle<LiteralsArray> literals, BailoutId osr_ast_id) {
+ AddToOptimizedCodeMapInternal(shared, native_context, code, literals,
+ osr_ast_id);
+}
+
+
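+// Records only the literals for the given native context in the optimized
+// code map, passing undefined in place of a code object.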
+// static
+void SharedFunctionInfo::AddLiteralsToOptimizedCodeMap(
+ Handle<SharedFunctionInfo> shared, Handle<Context> native_context,
+ Handle<LiteralsArray> literals) {
+ Isolate* isolate = shared->GetIsolate();
+ Handle<Oddball> undefined = isolate->factory()->undefined_value();
+ AddToOptimizedCodeMapInternal(shared, native_context, undefined, literals,
+ BailoutId::None());
}
@@ -6028,11 +6087,6 @@
}
-bool JSFunction::IsOptimizable() {
- return code()->kind() == Code::FUNCTION && code()->optimizable();
-}
-
-
bool JSFunction::IsMarkedForOptimization() {
return code() == GetIsolate()->builtins()->builtin(
Builtins::kCompileOptimized);
@@ -6051,9 +6105,25 @@
}
-bool JSFunction::IsInobjectSlackTrackingInProgress() {
- return has_initial_map() &&
- initial_map()->counter() >= Map::kSlackTrackingCounterEnd;
+void JSFunction::CompleteInobjectSlackTrackingIfActive() {
+ if (has_initial_map() && initial_map()->IsInobjectSlackTrackingInProgress()) {
+ initial_map()->CompleteInobjectSlackTracking();
+ }
+}
+
+
+bool Map::IsInobjectSlackTrackingInProgress() {
+ return construction_counter() != Map::kNoSlackTracking;
+}
+
+
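+// Decrements the construction counter while slack tracking is active and
+// completes the tracking once the counter reaches the end of its range.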
+void Map::InobjectSlackTrackingStep() {
+ if (!IsInobjectSlackTrackingInProgress()) return;
+ int counter = construction_counter();
+ set_construction_counter(counter - 1);
+ if (counter == kSlackTrackingCounterEnd) {
+ CompleteInobjectSlackTracking();
+ }
}
@@ -6114,6 +6184,9 @@
}
+Context* JSFunction::native_context() { return context()->native_context(); }
+
+
void JSFunction::set_context(Object* value) {
DCHECK(value->IsUndefined() || value->IsContext());
WRITE_FIELD(this, kContextOffset, value);
@@ -6157,96 +6230,34 @@
DCHECK(has_prototype());
// If the function's prototype property has been set to a non-JSObject
// value, that value is stored in the constructor field of the map.
- if (map()->has_non_instance_prototype()) return map()->constructor();
+ if (map()->has_non_instance_prototype()) {
+ Object* prototype = map()->GetConstructor();
+ // The map must have a prototype in that field, not a back pointer.
+ DCHECK(!prototype->IsMap());
+ return prototype;
+ }
return instance_prototype();
}
-bool JSFunction::should_have_prototype() {
- return map()->function_with_prototype();
-}
-
-
bool JSFunction::is_compiled() {
- return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
-}
-
-
-FixedArray* JSFunction::literals() {
- DCHECK(!shared()->bound());
- return literals_or_bindings();
-}
-
-
-void JSFunction::set_literals(FixedArray* literals) {
- DCHECK(!shared()->bound());
- set_literals_or_bindings(literals);
-}
-
-
-FixedArray* JSFunction::function_bindings() {
- DCHECK(shared()->bound());
- return literals_or_bindings();
-}
-
-
-void JSFunction::set_function_bindings(FixedArray* bindings) {
- DCHECK(shared()->bound());
- // Bound function literal may be initialized to the empty fixed array
- // before the bindings are set.
- DCHECK(bindings == GetHeap()->empty_fixed_array() ||
- bindings->map() == GetHeap()->fixed_cow_array_map());
- set_literals_or_bindings(bindings);
+ Builtins* builtins = GetIsolate()->builtins();
+ return code() != builtins->builtin(Builtins::kCompileLazy) &&
+ code() != builtins->builtin(Builtins::kCompileOptimized) &&
+ code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
}
int JSFunction::NumberOfLiterals() {
- DCHECK(!shared()->bound());
return literals()->length();
}
-Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
- DCHECK(id < kJSBuiltinsCount); // id is unsigned.
- return READ_FIELD(this, OffsetOfFunctionWithId(id));
-}
-
-
-void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
- Object* value) {
- DCHECK(id < kJSBuiltinsCount); // id is unsigned.
- WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
- WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
-}
-
-
-Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
- DCHECK(id < kJSBuiltinsCount); // id is unsigned.
- return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
-}
-
-
-void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
- Code* value) {
- DCHECK(id < kJSBuiltinsCount); // id is unsigned.
- WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
- DCHECK(!GetHeap()->InNewSpace(value));
-}
-
-
+ACCESSORS(JSProxy, target, JSReceiver, kTargetOffset)
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
-ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
-ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
-
-void JSProxy::InitializeBody(int object_size, Object* value) {
- DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
- for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
- WRITE_FIELD(this, offset, value);
- }
-}
-
+bool JSProxy::IsRevoked() const { return !handler()->IsJSReceiver(); }
ACCESSORS(JSCollection, table, Object, kTableOffset)
@@ -6289,7 +6300,6 @@
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
-SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
bool JSGeneratorObject::is_suspended() {
DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
@@ -6335,8 +6345,8 @@
ACCESSORS(JSDate, sec, Object, kSecOffset)
-ACCESSORS(JSMessageObject, type, String, kTypeOffset)
-ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
+SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
+ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
@@ -6345,6 +6355,7 @@
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
+INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
@@ -6356,11 +6367,12 @@
WRITE_FIELD(this, kRelocationInfoOffset, NULL);
WRITE_FIELD(this, kHandlerTableOffset, NULL);
WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
- WRITE_FIELD(this, kConstantPoolOffset, NULL);
// Do not wipe out major/minor keys on a code stub or IC
if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
}
+ WRITE_FIELD(this, kNextCodeLinkOffset, NULL);
+ WRITE_FIELD(this, kGCMetadataOffset, NULL);
}
@@ -6435,6 +6447,17 @@
}
+int Code::ExecutableSize() {
+ // Check that the assumptions about the layout of the code object hold.
+ DCHECK_EQ(static_cast<int>(instruction_start() - address()),
+ Code::kHeaderSize);
+ return instruction_size() + Code::kHeaderSize;
+}
+
+
+int Code::CodeSize() { return SizeFor(body_size()); }
+
+
ACCESSORS(JSArray, length, Object, kLengthOffset)
@@ -6451,50 +6474,123 @@
ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
-ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
-bool JSArrayBuffer::is_external() {
- return BooleanBit::get(flag(), kIsExternalBit);
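+// The bit field occupies only 32 bits of a pointer-sized slot; when the two
+// sizes differ, the unused half is zeroed so the whole slot stays defined.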
+void JSArrayBuffer::set_bit_field(uint32_t bits) {
+ if (kInt32Size != kPointerSize) {
+#if V8_TARGET_LITTLE_ENDIAN
+ WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
+#else
+ WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
+#endif
+ }
+ WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
}
+uint32_t JSArrayBuffer::bit_field() const {
+ return READ_UINT32_FIELD(this, kBitFieldOffset);
+}
+
+
+bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }
+
+
void JSArrayBuffer::set_is_external(bool value) {
- set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
-}
-
-
-bool JSArrayBuffer::should_be_freed() {
- return BooleanBit::get(flag(), kShouldBeFreed);
-}
-
-
-void JSArrayBuffer::set_should_be_freed(bool value) {
- set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
+ set_bit_field(IsExternal::update(bit_field(), value));
}
bool JSArrayBuffer::is_neuterable() {
- return BooleanBit::get(flag(), kIsNeuterableBit);
+ return IsNeuterable::decode(bit_field());
}
void JSArrayBuffer::set_is_neuterable(bool value) {
- set_flag(BooleanBit::set(flag(), kIsNeuterableBit, value));
+ set_bit_field(IsNeuterable::update(bit_field(), value));
}
-ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
-ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)
+bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }
+
+
+void JSArrayBuffer::set_was_neutered(bool value) {
+ set_bit_field(WasNeutered::update(bit_field(), value));
+}
+
+
+bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }
+
+
+void JSArrayBuffer::set_is_shared(bool value) {
+ set_bit_field(IsShared::update(bit_field(), value));
+}
+
+
+Object* JSArrayBufferView::byte_offset() const {
+ if (WasNeutered()) return Smi::FromInt(0);
+ return Object::cast(READ_FIELD(this, kByteOffsetOffset));
+}
+
+
+void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
+ WRITE_FIELD(this, kByteOffsetOffset, value);
+ CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
+}
+
+
+Object* JSArrayBufferView::byte_length() const {
+ if (WasNeutered()) return Smi::FromInt(0);
+ return Object::cast(READ_FIELD(this, kByteLengthOffset));
+}
+
+
+void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
+ WRITE_FIELD(this, kByteLengthOffset, value);
+ CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
+}
ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
-ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
-ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
-ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
-ACCESSORS(JSTypedArray, length, Object, kLengthOffset)
+#ifdef VERIFY_HEAP
+ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
+ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
+#endif
+
+
+bool JSArrayBufferView::WasNeutered() const {
+ return JSArrayBuffer::cast(buffer())->was_neutered();
+}
+
+
+Object* JSTypedArray::length() const {
+ if (WasNeutered()) return Smi::FromInt(0);
+ return Object::cast(READ_FIELD(this, kLengthOffset));
+}
+
+
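+// Returns the length as a uint32; a neutered typed array reports length 0.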
+uint32_t JSTypedArray::length_value() const {
+ if (WasNeutered()) return 0;
+ uint32_t index = 0;
+ CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
+ return index;
+}
+
+
+void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
+ WRITE_FIELD(this, kLengthOffset, value);
+ CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
+}
+
+
+#ifdef VERIFY_HEAP
+ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
+#endif
+
ACCESSORS(JSRegExp, data, Object, kDataOffset)
+ACCESSORS(JSRegExp, flags, Object, kFlagsOffset)
+ACCESSORS(JSRegExp, source, Object, kSourceOffset)
JSRegExp::Type JSRegExp::TypeTag() {
@@ -6567,7 +6663,7 @@
fixed_array->IsFixedArray() &&
fixed_array->IsDictionary()) ||
(kind > DICTIONARY_ELEMENTS));
- DCHECK((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
+ DCHECK(!IsSloppyArgumentsElements(kind) ||
(elements()->IsFixedArray() && elements()->length() >= 2));
}
#endif
@@ -6575,11 +6671,6 @@
}
-ElementsAccessor* JSObject::GetElementsAccessor() {
- return ElementsAccessor::ForKind(GetElementsKind());
-}
-
-
bool JSObject::HasFastObjectElements() {
return IsFastObjectElementsKind(GetElementsKind());
}
@@ -6615,32 +6706,21 @@
}
+bool JSObject::HasFastArgumentsElements() {
+ return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
+}
+
+
+bool JSObject::HasSlowArgumentsElements() {
+ return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
+}
+
+
bool JSObject::HasSloppyArgumentsElements() {
- return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
+ return IsSloppyArgumentsElements(GetElementsKind());
}
-bool JSObject::HasExternalArrayElements() {
- HeapObject* array = elements();
- DCHECK(array != NULL);
- return array->IsExternalArray();
-}
-
-
-#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
-bool JSObject::HasExternal##Type##Elements() { \
- HeapObject* array = elements(); \
- DCHECK(array != NULL); \
- if (!array->IsHeapObject()) \
- return false; \
- return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
-}
-
-TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)
-
-#undef EXTERNAL_ELEMENTS_CHECK
-
-
bool JSObject::HasFixedTypedArrayElements() {
HeapObject* array = elements();
DCHECK(array != NULL);
@@ -6672,9 +6752,10 @@
}
-NameDictionary* JSObject::property_dictionary() {
+GlobalDictionary* JSObject::global_dictionary() {
DCHECK(!HasFastProperties());
- return NameDictionary::cast(properties());
+ DCHECK(IsJSGlobalObject());
+ return GlobalDictionary::cast(properties());
}
@@ -6702,8 +6783,9 @@
return String::cast(this)->ComputeAndSetHash();
}
-bool Name::IsOwn() {
- return this->IsSymbol() && Symbol::cast(this)->is_own();
+
+bool Name::IsPrivate() {
+ return this->IsSymbol() && Symbol::cast(this)->is_private();
}
@@ -6786,7 +6868,7 @@
return false;
}
}
- if (array_index_ > 429496729U - ((d + 2) >> 3)) {
+ if (array_index_ > 429496729U - ((d + 3) >> 3)) {
is_array_index_ = false;
return false;
}
@@ -6825,6 +6907,10 @@
}
+IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
+ : StringHasher(len, seed) {}
+
+
uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
IteratingStringHasher hasher(string->length(), seed);
// Nothing to do.
@@ -6888,49 +6974,180 @@
}
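+// Relational comparison helpers built on Object::Compare. A kUndefined result
+// (e.g. from a NaN operand) makes every relational comparison false.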
+// static
+Maybe<bool> Object::GreaterThan(Handle<Object> x, Handle<Object> y,
+ Strength strength) {
+ Maybe<ComparisonResult> result = Compare(x, y, strength);
+ if (result.IsJust()) {
+ switch (result.FromJust()) {
+ case ComparisonResult::kGreaterThan:
+ return Just(true);
+ case ComparisonResult::kLessThan:
+ case ComparisonResult::kEqual:
+ case ComparisonResult::kUndefined:
+ return Just(false);
+ }
+ }
+ return Nothing<bool>();
+}
+
+
+// static
+Maybe<bool> Object::GreaterThanOrEqual(Handle<Object> x, Handle<Object> y,
+ Strength strength) {
+ Maybe<ComparisonResult> result = Compare(x, y, strength);
+ if (result.IsJust()) {
+ switch (result.FromJust()) {
+ case ComparisonResult::kEqual:
+ case ComparisonResult::kGreaterThan:
+ return Just(true);
+ case ComparisonResult::kLessThan:
+ case ComparisonResult::kUndefined:
+ return Just(false);
+ }
+ }
+ return Nothing<bool>();
+}
+
+
+// static
+Maybe<bool> Object::LessThan(Handle<Object> x, Handle<Object> y,
+ Strength strength) {
+ Maybe<ComparisonResult> result = Compare(x, y, strength);
+ if (result.IsJust()) {
+ switch (result.FromJust()) {
+ case ComparisonResult::kLessThan:
+ return Just(true);
+ case ComparisonResult::kEqual:
+ case ComparisonResult::kGreaterThan:
+ case ComparisonResult::kUndefined:
+ return Just(false);
+ }
+ }
+ return Nothing<bool>();
+}
+
+
+// static
+Maybe<bool> Object::LessThanOrEqual(Handle<Object> x, Handle<Object> y,
+ Strength strength) {
+ Maybe<ComparisonResult> result = Compare(x, y, strength);
+ if (result.IsJust()) {
+ switch (result.FromJust()) {
+ case ComparisonResult::kEqual:
+ case ComparisonResult::kLessThan:
+ return Just(true);
+ case ComparisonResult::kGreaterThan:
+ case ComparisonResult::kUndefined:
+ return Just(false);
+ }
+ }
+ return Nothing<bool>();
+}
+
+
+MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
+ Handle<Name> name,
+ LanguageMode language_mode) {
+ LookupIterator it =
+ LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
+ return GetProperty(&it, language_mode);
+}
+
+
+MaybeHandle<Object> Object::GetPropertyOrElement(Handle<JSReceiver> holder,
+ Handle<Name> name,
+ Handle<Object> receiver,
+ LanguageMode language_mode) {
+ LookupIterator it = LookupIterator::PropertyOrElement(
+ name->GetIsolate(), receiver, name, holder);
+ return GetProperty(&it, language_mode);
+}
+
+
+void JSReceiver::initialize_properties() {
+ DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
+ DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_properties_dictionary()));
+ if (map()->is_dictionary_map()) {
+ WRITE_FIELD(this, kPropertiesOffset,
+ GetHeap()->empty_properties_dictionary());
+ } else {
+ WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
+ }
+}
+
+
+bool JSReceiver::HasFastProperties() {
+ DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
+ return !properties()->IsDictionary();
+}
+
+
+NameDictionary* JSReceiver::property_dictionary() {
+ DCHECK(!HasFastProperties());
+ DCHECK(!IsJSGlobalObject());
+ return NameDictionary::cast(properties());
+}
+
+
Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
Handle<Name> name) {
- if (object->IsJSProxy()) {
- Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
- return JSProxy::HasPropertyWithHandler(proxy, name);
- }
- Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
- if (!result.has_value) return Maybe<bool>();
- return maybe(result.value != ABSENT);
+ LookupIterator it =
+ LookupIterator::PropertyOrElement(object->GetIsolate(), object, name);
+ return HasProperty(&it);
}
Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
Handle<Name> name) {
- if (object->IsJSProxy()) {
- Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
- return JSProxy::HasPropertyWithHandler(proxy, name);
+ if (object->IsJSObject()) { // Shortcut
+ LookupIterator it = LookupIterator::PropertyOrElement(
+ object->GetIsolate(), object, name, LookupIterator::HIDDEN);
+ return HasProperty(&it);
}
- Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
- if (!result.has_value) return Maybe<bool>();
- return maybe(result.value != ABSENT);
+
+ Maybe<PropertyAttributes> attributes =
+ JSReceiver::GetOwnPropertyAttributes(object, name);
+ MAYBE_RETURN(attributes, Nothing<bool>());
+ return Just(attributes.FromJust() != ABSENT);
}
Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
- Handle<JSReceiver> object, Handle<Name> key) {
- uint32_t index;
- if (object->IsJSObject() && key->AsArrayIndex(&index)) {
- return GetElementAttribute(object, index);
- }
- LookupIterator it(object, key);
+ Handle<JSReceiver> object, Handle<Name> name) {
+ LookupIterator it =
+ LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
return GetPropertyAttributes(&it);
}
-Maybe<PropertyAttributes> JSReceiver::GetElementAttribute(
+Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
+ Handle<JSReceiver> object, Handle<Name> name) {
+ LookupIterator it = LookupIterator::PropertyOrElement(
+ name->GetIsolate(), object, name, LookupIterator::HIDDEN);
+ return GetPropertyAttributes(&it);
+}
+
+
+Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
+ LookupIterator it(object->GetIsolate(), object, index);
+ return HasProperty(&it);
+}
+
+
+Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
Handle<JSReceiver> object, uint32_t index) {
- if (object->IsJSProxy()) {
- return JSProxy::GetElementAttributeWithHandler(
- Handle<JSProxy>::cast(object), object, index);
- }
- return JSObject::GetElementAttributeWithReceiver(
- Handle<JSObject>::cast(object), object, index, true);
+ Isolate* isolate = object->GetIsolate();
+ LookupIterator it(isolate, object, index);
+ return GetPropertyAttributes(&it);
+}
+
+
+Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
+ Handle<JSReceiver> object, uint32_t index) {
+ Isolate* isolate = object->GetIsolate();
+ LookupIterator it(isolate, object, index, LookupIterator::HIDDEN);
+ return GetPropertyAttributes(&it);
}
@@ -6939,7 +7156,7 @@
}
-bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
+bool JSGlobalProxy::IsDetachedFrom(JSGlobalObject* global) const {
const PrototypeIterator iter(this->GetIsolate(),
const_cast<JSGlobalProxy*>(this));
return iter.GetCurrent() != global;
@@ -6960,42 +7177,6 @@
}
-Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
- if (object->IsJSProxy()) {
- Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
- return JSProxy::HasElementWithHandler(proxy, index);
- }
- Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
- Handle<JSObject>::cast(object), object, index, true);
- if (!result.has_value) return Maybe<bool>();
- return maybe(result.value != ABSENT);
-}
-
-
-Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
- uint32_t index) {
- if (object->IsJSProxy()) {
- Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
- return JSProxy::HasElementWithHandler(proxy, index);
- }
- Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
- Handle<JSObject>::cast(object), object, index, false);
- if (!result.has_value) return Maybe<bool>();
- return maybe(result.value != ABSENT);
-}
-
-
-Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttribute(
- Handle<JSReceiver> object, uint32_t index) {
- if (object->IsJSProxy()) {
- return JSProxy::GetElementAttributeWithHandler(
- Handle<JSProxy>::cast(object), object, index);
- }
- return JSObject::GetElementAttributeWithReceiver(
- Handle<JSObject>::cast(object), object, index, false);
-}
-
-
bool AccessorInfo::all_can_read() {
return BooleanBit::get(flag(), kAllCanReadBit);
}
@@ -7016,13 +7197,23 @@
}
+bool AccessorInfo::is_special_data_property() {
+ return BooleanBit::get(flag(), kSpecialDataProperty);
+}
+
+
+void AccessorInfo::set_is_special_data_property(bool value) {
+ set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
+}
+
+
PropertyAttributes AccessorInfo::property_attributes() {
- return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
+ return AttributesField::decode(static_cast<uint32_t>(flag()));
}
void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
- set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
+ set_flag(AttributesField::update(flag(), attributes));
}
@@ -7034,11 +7225,48 @@
}
-void ExecutableAccessorInfo::clear_setter() {
- auto foreign = GetIsolate()->factory()->NewForeign(
- reinterpret_cast<v8::internal::Address>(
- reinterpret_cast<intptr_t>(nullptr)));
- set_setter(*foreign);
+bool AccessorInfo::HasExpectedReceiverType() {
+ return expected_receiver_type()->IsFunctionTemplateInfo();
+}
+
+
+Object* AccessorPair::get(AccessorComponent component) {
+ return component == ACCESSOR_GETTER ? getter() : setter();
+}
+
+
+void AccessorPair::set(AccessorComponent component, Object* value) {
+ if (component == ACCESSOR_GETTER) {
+ set_getter(value);
+ } else {
+ set_setter(value);
+ }
+}
+
+
+void AccessorPair::SetComponents(Object* getter, Object* setter) {
+ if (!getter->IsNull()) set_getter(getter);
+ if (!setter->IsNull()) set_setter(setter);
+}
+
+
+bool AccessorPair::Equals(AccessorPair* pair) {
+ return (this == pair) || pair->Equals(getter(), setter());
+}
+
+
+bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
+ return (getter() == getter_value) && (setter() == setter_value);
+}
+
+
+bool AccessorPair::ContainsAccessor() {
+ return IsJSAccessor(getter()) || IsJSAccessor(setter());
+}
+
+
+bool AccessorPair::IsJSAccessor(Object* obj) {
+ return obj->IsCallable() || obj->IsUndefined();
}
@@ -7046,7 +7274,7 @@
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
Handle<Object> key,
Handle<Object> value) {
- SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
+ this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}
@@ -7055,15 +7283,40 @@
Handle<Object> key,
Handle<Object> value,
PropertyDetails details) {
- DCHECK(!key->IsName() ||
- details.IsDeleted() ||
- details.dictionary_index() > 0);
- int index = DerivedHashTable::EntryToIndex(entry);
+ Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
+}
+
+
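+// Writes the (key, value, details) triple for an entry of a dictionary whose
+// entries occupy three slots.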
+template <typename Key>
+template <typename Dictionary>
+void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
+ Handle<Object> key,
+ Handle<Object> value,
+ PropertyDetails details) {
+ STATIC_ASSERT(Dictionary::kEntrySize == 3);
+ DCHECK(!key->IsName() || details.dictionary_index() > 0);
+ int index = dict->EntryToIndex(entry);
DisallowHeapAllocation no_gc;
- WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
- FixedArray::set(index, *key, mode);
- FixedArray::set(index+1, *value, mode);
- FixedArray::set(index+2, details.AsSmi());
+ WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
+ dict->set(index, *key, mode);
+ dict->set(index + 1, *value, mode);
+ dict->set(index + 2, details.AsSmi());
+}
+
+
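+// Global dictionaries store (key, PropertyCell) pairs; the property details
+// are kept on the cell itself.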
+template <typename Dictionary>
+void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
+ Handle<Object> key, Handle<Object> value,
+ PropertyDetails details) {
+ STATIC_ASSERT(Dictionary::kEntrySize == 2);
+ DCHECK(!key->IsName() || details.dictionary_index() > 0);
+ DCHECK(value->IsPropertyCell());
+ int index = dict->EntryToIndex(entry);
+ DisallowHeapAllocation no_gc;
+ WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
+ dict->set(index, *key, mode);
+ dict->set(index + 1, *value, mode);
+ PropertyCell::cast(*value)->set_property_details(details);
}
@@ -7134,6 +7387,34 @@
}
+template <typename Dictionary>
+PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
+ DCHECK(entry >= 0); // Not found is -1, which is not caught by get().
+ Object* raw_value = dict->ValueAt(entry);
+ DCHECK(raw_value->IsPropertyCell());
+ PropertyCell* cell = PropertyCell::cast(raw_value);
+ return cell->property_details();
+}
+
+
+template <typename Dictionary>
+void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
+ PropertyDetails value) {
+ DCHECK(entry >= 0); // Not found is -1, which is not caught by get().
+ Object* raw_value = dict->ValueAt(entry);
+ DCHECK(raw_value->IsPropertyCell());
+ PropertyCell* cell = PropertyCell::cast(raw_value);
+ cell->set_property_details(value);
+}
+
+
+template <typename Dictionary>
+bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
+ DCHECK(dict->ValueAt(entry)->IsPropertyCell());
+ return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole();
+}
+
+
bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
return key->SameValue(other);
}
@@ -7162,15 +7443,25 @@
}
+Object* OrderedHashMap::ValueAt(int entry) {
+ return get(EntryToIndex(entry) + kValueOffset);
+}
+
+
template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
- return key->SameValue(other);
+ if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
+ return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
+ : *key == other;
}
template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
- intptr_t hash = reinterpret_cast<intptr_t>(*key);
+ intptr_t hash =
+ key->IsWeakCell()
+ ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
+ : reinterpret_cast<intptr_t>(*key);
return (uint32_t)(hash & 0xFFFFFFFF);
}
@@ -7178,6 +7469,7 @@
template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
Object* other) {
+ if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
intptr_t hash = reinterpret_cast<intptr_t>(other);
return (uint32_t)(hash & 0xFFFFFFFF);
}
@@ -7190,6 +7482,30 @@
}
+bool ScopeInfo::IsAsmModule() { return AsmModuleField::decode(Flags()); }
+
+
+bool ScopeInfo::IsAsmFunction() { return AsmFunctionField::decode(Flags()); }
+
+
+bool ScopeInfo::HasSimpleParameters() {
+ return HasSimpleParametersField::decode(Flags());
+}
+
+
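+// A ScopeInfo with length() == 0 stores no fields, so each numeric field
+// reads as zero.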
+#define SCOPE_INFO_FIELD_ACCESSORS(name) \
+ void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
+ int ScopeInfo::name() { \
+ if (length() > 0) { \
+ return Smi::cast(get(k##name))->value(); \
+ } else { \
+ return 0; \
+ } \
+ }
+FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(SCOPE_INFO_FIELD_ACCESSORS)
+#undef SCOPE_INFO_FIELD_ACCESSORS
+
+
void Map::ClearCodeCache(Heap* heap) {
// No write barrier is needed since empty_fixed_array is not in new space.
// Please note this function is used during marking:
@@ -7202,27 +7518,12 @@
int Map::SlackForArraySize(int old_size, int size_limit) {
const int max_slack = size_limit - old_size;
- CHECK(max_slack >= 0);
- if (old_size < 4) return Min(max_slack, 1);
- return Min(max_slack, old_size / 2);
-}
-
-
-void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
- DCHECK(array->HasFastSmiOrObjectElements());
- Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
- const int kArraySizeThatFitsComfortablyInNewSpace = 128;
- if (elts->length() < required_size) {
- // Doubling in size would be overkill, but leave some slack to avoid
- // constantly growing.
- Expand(array, required_size + (required_size >> 3));
- // It's a performance benefit to keep a frequently used array in new-space.
- } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
- required_size < kArraySizeThatFitsComfortablyInNewSpace) {
- // Expand will allocate a new backing store in new space even if the size
- // we asked for isn't larger than what we had before.
- Expand(array, required_size);
+ CHECK_LE(0, max_slack);
+ if (old_size < 4) {
+ DCHECK_LE(1, max_slack);
+ return 1;
}
+ return Min(max_slack, old_size / 4);
}
@@ -7232,9 +7533,18 @@
}
-bool JSArray::AllowsSetElementsLength() {
+bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
+  // If the new array won't fit in some non-trivial fraction of the max old
+ // space size, then force it to go dictionary mode.
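+  // E.g. with a 1 GB old generation limit and 8-byte doubles, this threshold
+  // is roughly 33 million elements.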
+ uint32_t max_fast_array_size =
+ static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
+ return new_length >= max_fast_array_size;
+}
+
+
+bool JSArray::AllowsSetLength() {
bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
- DCHECK(result == !HasExternalArrayElements());
+ DCHECK(result == !HasFixedTypedArrayElements());
return result;
}
@@ -7366,100 +7676,6 @@
}
-int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
- return map->instance_size();
-}
-
-
-void Foreign::ForeignIterateBody(ObjectVisitor* v) {
- v->VisitExternalReference(
- reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
-}
-
-
-template<typename StaticVisitor>
-void Foreign::ForeignIterateBody() {
- StaticVisitor::VisitExternalReference(
- reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
-}
-
-
-void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
- typedef v8::String::ExternalOneByteStringResource Resource;
- v->VisitExternalOneByteString(
- reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
-}
-
-
-template <typename StaticVisitor>
-void ExternalOneByteString::ExternalOneByteStringIterateBody() {
- typedef v8::String::ExternalOneByteStringResource Resource;
- StaticVisitor::VisitExternalOneByteString(
- reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
-}
-
-
-void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
- typedef v8::String::ExternalStringResource Resource;
- v->VisitExternalTwoByteString(
- reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
-}
-
-
-template<typename StaticVisitor>
-void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
- typedef v8::String::ExternalStringResource Resource;
- StaticVisitor::VisitExternalTwoByteString(
- reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
-}
-
-
-static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
- int start_offset,
- int end_offset,
- ObjectVisitor* v) {
- DCHECK(FLAG_unbox_double_fields);
- DCHECK(IsAligned(start_offset, kPointerSize) &&
- IsAligned(end_offset, kPointerSize));
-
- LayoutDescriptorHelper helper(object->map());
- DCHECK(!helper.all_fields_tagged());
-
- for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
- // Visit all tagged fields.
- if (helper.IsTagged(offset)) {
- v->VisitPointer(HeapObject::RawField(object, offset));
- }
- }
-}
-
-
-template<int start_offset, int end_offset, int size>
-void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
- HeapObject* obj,
- ObjectVisitor* v) {
- if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
- v->VisitPointers(HeapObject::RawField(obj, start_offset),
- HeapObject::RawField(obj, end_offset));
- } else {
- IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
- }
-}
-
-
-template<int start_offset>
-void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
- int object_size,
- ObjectVisitor* v) {
- if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
- v->VisitPointers(HeapObject::RawField(obj, start_offset),
- HeapObject::RawField(obj, object_size));
- } else {
- IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
- }
-}
-
-
template<class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
TableType* table(TableType::cast(this->table()));
@@ -7490,11 +7706,71 @@
}
+ACCESSORS(JSIteratorResult, done, Object, kDoneOffset)
+ACCESSORS(JSIteratorResult, value, Object, kValueOffset)
+
+
+String::SubStringRange::SubStringRange(String* string, int first, int length)
+ : string_(string),
+ first_(first),
+ length_(length == -1 ? string->length() : length) {}
+
+
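+// Forward iterator over the uc16 characters of the substring, reading them
+// through the string's flat content.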
+class String::SubStringRange::iterator final {
+ public:
+ typedef std::forward_iterator_tag iterator_category;
+ typedef int difference_type;
+ typedef uc16 value_type;
+ typedef uc16* pointer;
+ typedef uc16& reference;
+
+ iterator(const iterator& other)
+ : content_(other.content_), offset_(other.offset_) {}
+
+ uc16 operator*() { return content_.Get(offset_); }
+ bool operator==(const iterator& other) const {
+ return content_.UsesSameString(other.content_) && offset_ == other.offset_;
+ }
+ bool operator!=(const iterator& other) const {
+ return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
+ }
+ iterator& operator++() {
+ ++offset_;
+ return *this;
+ }
+ iterator operator++(int);
+
+ private:
+ friend class String;
+ iterator(String* from, int offset)
+ : content_(from->GetFlatContent()), offset_(offset) {}
+ String::FlatContent content_;
+ int offset_;
+};
+
+
+String::SubStringRange::iterator String::SubStringRange::begin() {
+ return String::SubStringRange::iterator(string_, first_);
+}
+
+
+String::SubStringRange::iterator String::SubStringRange::end() {
+ return String::SubStringRange::iterator(string_, first_ + length_);
+}
+
+
+// Predictably converts HeapObject* or Address to uint32 by calculating
+// the offset of the address within its MemoryChunk.
+static inline uint32_t ObjectAddressForHashing(void* object) {
+ uint32_t value = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(object));
+ return value & MemoryChunk::kAlignmentMask;
+}
+
+
#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
-#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef SYNCHRONIZED_SMI_ACCESSORS
#undef NOBARRIER_SMI_ACCESSORS
@@ -7514,15 +7790,30 @@
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
+#undef READ_UINT8_FIELD
+#undef WRITE_UINT8_FIELD
+#undef READ_INT8_FIELD
+#undef WRITE_INT8_FIELD
+#undef READ_UINT16_FIELD
+#undef WRITE_UINT16_FIELD
+#undef READ_INT16_FIELD
+#undef WRITE_INT16_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
-#undef READ_SHORT_FIELD
-#undef WRITE_SHORT_FIELD
+#undef READ_INT32_FIELD
+#undef WRITE_INT32_FIELD
+#undef READ_FLOAT_FIELD
+#undef WRITE_FLOAT_FIELD
+#undef READ_UINT64_FIELD
+#undef WRITE_UINT64_FIELD
+#undef READ_INT64_FIELD
+#undef WRITE_INT64_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
#undef NOBARRIER_READ_BYTE_FIELD
#undef NOBARRIER_WRITE_BYTE_FIELD
-} } // namespace v8::internal
+} // namespace internal
+} // namespace v8
#endif // V8_OBJECTS_INL_H_