Update V8 to version 4.1.0.21
This is a cherry-pick of all commits up to and including the
4.1.0.21 cherry-pick in Chromium.
Original commit message:
Version 4.1.0.21 (cherry-pick)
Merged 206e9136bde0f2b5ae8cb77afbb1e7833e5bd412
Unlink pages from the space page list after evacuation.
BUG=430201
LOG=N
R=jkummerow@chromium.org
Review URL: https://codereview.chromium.org/953813002
Cr-Commit-Position: refs/branch-heads/4.1@{#22}
Cr-Branched-From: 2e08d2a7aa9d65d269d8c57aba82eb38a8cb0a18-refs/heads/candidates@{#25353}
---
FPIIM-449
Change-Id: I8c23c7bbb70772b4858fe8a47b64fa97ee0d1f8c
diff --git a/src/objects.h b/src/objects.h
index 3340350..c32f9f6 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -5,6 +5,8 @@
#ifndef V8_OBJECTS_H_
#define V8_OBJECTS_H_
+#include <iosfwd>
+
#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/bailout-reason.h"
@@ -18,6 +20,7 @@
#include "src/property-details.h"
#include "src/smart-pointers.h"
#include "src/unicode-inl.h"
+#include "src/unicode-decoder.h"
#include "src/zone.h"
#if V8_TARGET_ARCH_ARM
@@ -84,6 +87,8 @@
// - JSFunctionResultCache
// - ScopeInfo
// - TransitionArray
+// - ScriptContextTable
+// - WeakFixedArray
// - FixedDoubleArray
// - ExternalArray
// - ExternalUint8ClampedArray
@@ -140,6 +145,7 @@
// - DebugInfo
// - BreakPointInfo
// - CodeCache
+// - WeakCell
//
// Formats of Object*:
// Smi: [31 bit signed int] 0
@@ -148,8 +154,6 @@
namespace v8 {
namespace internal {
-class OStream;
-
enum KeyedAccessStoreMode {
STANDARD_STORE,
STORE_TRANSITION_SMI_TO_OBJECT,
@@ -231,15 +235,15 @@
}
+enum IcCheckType { ELEMENT, PROPERTY };
+
+
// Setter that skips the write barrier if mode is SKIP_WRITE_BARRIER.
enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };
// Indicates whether a value can be loaded as a constant.
-enum StoreMode {
- ALLOW_AS_CONSTANT,
- FORCE_FIELD
-};
+enum StoreMode { ALLOW_IN_DESCRIPTOR, FORCE_IN_OBJECT };
// PropertyNormalizationMode is used to specify whether to keep
@@ -275,8 +279,9 @@
// either extends the current map with a new property, or it modifies the
// property that was added last to the current map.
enum SimpleTransitionFlag {
- SIMPLE_TRANSITION,
- FULL_TRANSITION
+ SIMPLE_PROPERTY_TRANSITION,
+ PROPERTY_TRANSITION,
+ SPECIAL_TRANSITION
};
@@ -421,6 +426,7 @@
V(FIXED_DOUBLE_ARRAY_TYPE) \
V(CONSTANT_POOL_ARRAY_TYPE) \
V(SHARED_FUNCTION_INFO_TYPE) \
+ V(WEAK_CELL_TYPE) \
\
V(JS_MESSAGE_OBJECT_TYPE) \
\
@@ -717,6 +723,7 @@
FIXED_ARRAY_TYPE,
CONSTANT_POOL_ARRAY_TYPE,
SHARED_FUNCTION_INFO_TYPE,
+ WEAK_CELL_TYPE,
// All the following types are subtypes of JSReceiver, which corresponds to
// objects in the JS sense. The first and the last type in this range are
@@ -845,14 +852,17 @@
class AllocationSite;
class AllocationSiteCreationContext;
class AllocationSiteUsageContext;
+class ConsString;
class DictionaryElementsAccessor;
class ElementsAccessor;
class FixedArrayBase;
class GlobalObject;
-class ObjectVisitor;
+class LayoutDescriptor;
class LookupIterator;
+class ObjectVisitor;
class StringStream;
class TypeFeedbackVector;
+class WeakCell;
// We cannot just say "class HeapType;" if it is created from a template... =8-?
template<class> class TypeImpl;
struct HeapTypeConfig;
@@ -869,7 +879,7 @@
#endif
#ifdef OBJECT_PRINT
-#define DECLARE_PRINTER(Name) void Name##Print(OStream& os); // NOLINT
+#define DECLARE_PRINTER(Name) void Name##Print(std::ostream& os); // NOLINT
#else
#define DECLARE_PRINTER(Name)
#endif
@@ -924,6 +934,7 @@
V(JSContextExtensionObject) \
V(JSGeneratorObject) \
V(JSModule) \
+ V(LayoutDescriptor) \
V(Map) \
V(DescriptorArray) \
V(TransitionArray) \
@@ -933,8 +944,10 @@
V(DependentCode) \
V(FixedArray) \
V(FixedDoubleArray) \
+ V(WeakFixedArray) \
V(ConstantPoolArray) \
V(Context) \
+ V(ScriptContextTable) \
V(NativeContext) \
V(ScopeInfo) \
V(JSFunction) \
@@ -980,6 +993,7 @@
V(AccessCheckNeeded) \
V(Cell) \
V(PropertyCell) \
+ V(WeakCell) \
V(ObjectHashTable) \
V(WeakHashTable) \
V(OrderedHashTable)
@@ -1007,6 +1021,8 @@
CERTAINLY_NOT_STORE_FROM_KEYED
};
+ enum StorePropertyMode { NORMAL_PROPERTY, SUPER_PROPERTY };
+
INLINE(bool IsFixedArrayBase() const);
INLINE(bool IsExternal() const);
INLINE(bool IsAccessorInfo() const);
@@ -1116,11 +1132,15 @@
MUST_USE_RESULT static MaybeHandle<Object> SetProperty(
LookupIterator* it, Handle<Object> value, StrictMode strict_mode,
- StoreFromKeyed store_mode);
+ StoreFromKeyed store_mode,
+ StorePropertyMode data_store_mode = NORMAL_PROPERTY);
MUST_USE_RESULT static MaybeHandle<Object> WriteToReadOnlyProperty(
LookupIterator* it, Handle<Object> value, StrictMode strict_mode);
- static Handle<Object> SetDataProperty(LookupIterator* it,
- Handle<Object> value);
+ MUST_USE_RESULT static MaybeHandle<Object> WriteToReadOnlyElement(
+ Isolate* isolate, Handle<Object> receiver, uint32_t index,
+ Handle<Object> value, StrictMode strict_mode);
+ MUST_USE_RESULT static MaybeHandle<Object> SetDataProperty(
+ LookupIterator* it, Handle<Object> value);
MUST_USE_RESULT static MaybeHandle<Object> AddDataProperty(
LookupIterator* it, Handle<Object> value, PropertyAttributes attributes,
StrictMode strict_mode, StoreFromKeyed store_mode);
@@ -1164,6 +1184,13 @@
Handle<Object> receiver,
uint32_t index);
+ MUST_USE_RESULT static MaybeHandle<Object> SetElementWithReceiver(
+ Isolate* isolate, Handle<Object> object, Handle<Object> receiver,
+ uint32_t index, Handle<Object> value, StrictMode strict_mode);
+
+ static inline Handle<Object> GetPrototypeSkipHiddenPrototypes(
+ Isolate* isolate, Handle<Object> receiver);
+
// Returns the permanent hash code associated with this object. May return
// undefined if not yet created.
Object* GetHash();
@@ -1206,6 +1233,8 @@
// Prints this object without details to a message accumulator.
void ShortPrint(StringStream* accumulator);
+ void ShortPrint(std::ostream& os); // NOLINT
+
DECLARE_CAST(Object)
// Layout description.
@@ -1216,7 +1245,10 @@
void Print();
// Prints this object with details.
- void Print(OStream& os); // NOLINT
+ void Print(std::ostream& os); // NOLINT
+#else
+ void Print() { ShortPrint(); }
+ void Print(std::ostream& os) { ShortPrint(os); } // NOLINT
#endif
private:
@@ -1236,7 +1268,7 @@
};
-OStream& operator<<(OStream& os, const Brief& v);
+std::ostream& operator<<(std::ostream& os, const Brief& v);
// Smi represents integer Numbers that can be stored in 31 bits.
@@ -1261,7 +1293,7 @@
DECLARE_CAST(Smi)
// Dispatched behavior.
- void SmiPrint(OStream& os) const; // NOLINT
+ void SmiPrint(std::ostream& os) const; // NOLINT
DECLARE_VERIFIER(Smi)
static const int kMinValue =
@@ -1404,9 +1436,9 @@
const DisallowHeapAllocation& promise);
// Dispatched behavior.
- void HeapObjectShortPrint(OStream& os); // NOLINT
+ void HeapObjectShortPrint(std::ostream& os); // NOLINT
#ifdef OBJECT_PRINT
- void PrintHeader(OStream& os, const char* id); // NOLINT
+ void PrintHeader(std::ostream& os, const char* id); // NOLINT
#endif
DECLARE_PRINTER(HeapObject)
DECLARE_VERIFIER(HeapObject)
@@ -1419,6 +1451,8 @@
static void VerifyHeapPointer(Object* p);
#endif
+ inline bool NeedsToEnsureDoubleAlignment();
+
// Layout description.
// First field in a heap object is map.
static const int kMapOffset = Object::kHeaderSize;
@@ -1493,7 +1527,7 @@
// Dispatched behavior.
bool HeapNumberBooleanValue();
- void HeapNumberPrint(OStream& os); // NOLINT
+ void HeapNumberPrint(std::ostream& os); // NOLINT
DECLARE_VERIFIER(HeapNumber)
inline int get_exponent();
@@ -1618,9 +1652,6 @@
MUST_USE_RESULT static inline Maybe<PropertyAttributes>
GetOwnElementAttribute(Handle<JSReceiver> object, uint32_t index);
- // Return the constructor function (may be Heap::null_value()).
- inline Object* GetConstructor();
-
// Retrieves a permanent object identity hash code. The undefined value might
// be returned in case no hash was created yet.
inline Object* GetIdentityHash();
@@ -1792,6 +1823,10 @@
static void OptimizeAsPrototype(Handle<JSObject> object,
PrototypeOptimizationMode mode);
static void ReoptimizeIfPrototype(Handle<JSObject> object);
+ static void RegisterPrototypeUser(Handle<JSObject> prototype,
+ Handle<HeapObject> user);
+ static void UnregisterPrototypeUser(Handle<JSObject> prototype,
+ Handle<HeapObject> user);
// Retrieve interceptors.
InterceptorInfo* GetNamedInterceptor();
@@ -1834,10 +1869,6 @@
Handle<Object> receiver,
Handle<Name> name);
- // Returns true if this is an instance of an api function and has
- // been modified since it was created. May give false positives.
- bool IsDirty();
-
// Accessors for hidden properties object.
//
// Hidden properties are not own properties of the object itself.
@@ -1912,8 +1943,7 @@
// These methods do not perform access checks!
MUST_USE_RESULT static MaybeHandle<AccessorPair> GetOwnElementAccessorPair(
- Handle<JSObject> object,
- uint32_t index);
+ Handle<JSObject> object, uint32_t index);
MUST_USE_RESULT static MaybeHandle<Object> SetFastElement(
Handle<JSObject> object,
@@ -1941,9 +1971,8 @@
// Returns the index'th element.
// The undefined object if index is out of bounds.
MUST_USE_RESULT static MaybeHandle<Object> GetElementWithInterceptor(
- Handle<JSObject> object,
- Handle<Object> receiver,
- uint32_t index);
+ Handle<JSObject> object, Handle<Object> receiver, uint32_t index,
+ bool check_prototype);
enum SetFastElementsCapacitySmiMode {
kAllowSmiElements,
@@ -2034,7 +2063,8 @@
// an initial capacity for holding these properties.
static void NormalizeProperties(Handle<JSObject> object,
PropertyNormalizationMode mode,
- int expected_additional_properties);
+ int expected_additional_properties,
+ const char* reason);
// Convert and update the elements backing store to be a
// SeededNumberDictionary dictionary. Returns the backing after conversion.
@@ -2043,14 +2073,20 @@
// Transform slow named properties to fast variants.
static void MigrateSlowToFast(Handle<JSObject> object,
- int unused_property_fields);
+ int unused_property_fields, const char* reason);
+
+ inline bool IsUnboxedDoubleField(FieldIndex index);
// Access fast-case object properties at index.
static Handle<Object> FastPropertyAt(Handle<JSObject> object,
Representation representation,
FieldIndex index);
inline Object* RawFastPropertyAt(FieldIndex index);
+ inline double RawFastDoublePropertyAt(FieldIndex index);
+
inline void FastPropertyAtPut(FieldIndex index, Object* value);
+ inline void RawFastPropertyAtPut(FieldIndex index, Object* value);
+ inline void RawFastDoublePropertyAtPut(FieldIndex index, double value);
void WriteToField(int descriptor, Object* value);
// Access to in object properties.
@@ -2081,6 +2117,9 @@
MUST_USE_RESULT static MaybeHandle<Object> PreventExtensions(
Handle<JSObject> object);
+ // ES5 Object.seal
+ MUST_USE_RESULT static MaybeHandle<Object> Seal(Handle<JSObject> object);
+
// ES5 Object.freeze
MUST_USE_RESULT static MaybeHandle<Object> Freeze(Handle<JSObject> object);
@@ -2110,9 +2149,11 @@
DECLARE_PRINTER(JSObject)
DECLARE_VERIFIER(JSObject)
#ifdef OBJECT_PRINT
- void PrintProperties(OStream& os); // NOLINT
- void PrintElements(OStream& os); // NOLINT
- void PrintTransitions(OStream& os); // NOLINT
+ void PrintProperties(std::ostream& os); // NOLINT
+ void PrintElements(std::ostream& os); // NOLINT
+#endif
+#if defined(DEBUG) || defined(OBJECT_PRINT)
+ void PrintTransitions(std::ostream& os); // NOLINT
#endif
static void PrintElementsTransition(
@@ -2201,14 +2242,9 @@
Context* GetCreationContext();
// Enqueue change record for Object.observe. May cause GC.
- static void EnqueueChangeRecord(Handle<JSObject> object,
- const char* type,
- Handle<Name> name,
- Handle<Object> old_value);
-
- static void MigrateToNewProperty(Handle<JSObject> object,
- Handle<Map> transition,
- Handle<Object> value);
+ MUST_USE_RESULT static MaybeHandle<Object> EnqueueChangeRecord(
+ Handle<JSObject> object, const char* type, Handle<Name> name,
+ Handle<Object> old_value);
private:
friend class DictionaryElementsAccessor;
@@ -2220,11 +2256,6 @@
Handle<Map> new_map,
int expected_additional_properties);
- static void GeneralizeFieldRepresentation(Handle<JSObject> object,
- int modify_index,
- Representation new_representation,
- Handle<HeapType> new_field_type);
-
static void UpdateAllocationSite(Handle<JSObject> object,
ElementsKind to_kind);
@@ -2243,18 +2274,30 @@
GetElementAttributeWithInterceptor(Handle<JSObject> object,
Handle<JSReceiver> receiver,
uint32_t index, bool continue_search);
+
+ // Queries indexed interceptor on an object for property attributes.
+ //
+ // We determine property attributes as follows:
+ // - if interceptor has a query callback, then the property attributes are
+ // the result of query callback for index.
+ // - otherwise if interceptor has a getter callback and it returns
+ // non-empty value on index, then the property attributes is NONE
+ // (property is present, and it is enumerable, configurable, writable)
+ // - otherwise there are no property attributes that can be inferred for
+ // interceptor, and this function returns ABSENT.
+ MUST_USE_RESULT static Maybe<PropertyAttributes>
+ GetElementAttributeFromInterceptor(Handle<JSObject> object,
+ Handle<Object> receiver,
+ uint32_t index);
+
MUST_USE_RESULT static Maybe<PropertyAttributes>
GetElementAttributeWithoutInterceptor(Handle<JSObject> object,
Handle<JSReceiver> receiver,
uint32_t index,
bool continue_search);
MUST_USE_RESULT static MaybeHandle<Object> SetElementWithCallback(
- Handle<JSObject> object,
- Handle<Object> structure,
- uint32_t index,
- Handle<Object> value,
- Handle<JSObject> holder,
- StrictMode strict_mode);
+ Handle<Object> object, Handle<Object> structure, uint32_t index,
+ Handle<Object> value, Handle<JSObject> holder, StrictMode strict_mode);
MUST_USE_RESULT static MaybeHandle<Object> SetElementWithInterceptor(
Handle<JSObject> object,
uint32_t index,
@@ -2292,6 +2335,14 @@
Handle<Object> value,
StrictMode strict_mode,
bool check_prototype = true);
+ MUST_USE_RESULT static MaybeHandle<Object> GetElementWithFailedAccessCheck(
+ Isolate* isolate, Handle<JSObject> object, Handle<Object> receiver,
+ uint32_t index);
+ MUST_USE_RESULT static Maybe<PropertyAttributes>
+ GetElementAttributesWithFailedAccessCheck(Isolate* isolate,
+ Handle<JSObject> object,
+ Handle<Object> receiver,
+ uint32_t index);
MUST_USE_RESULT static MaybeHandle<Object> SetPropertyWithFailedAccessCheck(
LookupIterator* it, Handle<Object> value, StrictMode strict_mode);
@@ -2366,6 +2417,15 @@
static Handle<Smi> GetOrCreateIdentityHash(Handle<JSObject> object);
+ static Handle<SeededNumberDictionary> GetNormalizedElementDictionary(
+ Handle<JSObject> object);
+
+ // Helper for fast versions of preventExtensions, seal, and freeze.
+ // attrs is one of NONE, SEALED, or FROZEN (depending on the operation).
+ template <PropertyAttributes attrs>
+ MUST_USE_RESULT static MaybeHandle<Object> PreventExtensionsWithTransition(
+ Handle<JSObject> object);
+
DISALLOW_IMPLICIT_CONSTRUCTORS(JSObject);
};
@@ -2399,7 +2459,7 @@
class FixedArray: public FixedArrayBase {
public:
// Setter and getter for elements.
- inline Object* get(int index);
+ inline Object* get(int index) const;
static inline Handle<Object> get(Handle<FixedArray> array, int index);
// Setter that uses write barrier.
inline void set(int index, Object* value);
@@ -2431,10 +2491,12 @@
int new_length,
PretenureFlag pretenure = NOT_TENURED);
+ enum KeyFilter { ALL_KEYS, NON_SYMBOL_KEYS };
+
// Add the elements of a JSArray to this FixedArray.
MUST_USE_RESULT static MaybeHandle<FixedArray> AddKeysFromArrayLike(
- Handle<FixedArray> content,
- Handle<JSObject> array);
+ Handle<FixedArray> content, Handle<JSObject> array,
+ KeyFilter filter = ALL_KEYS);
// Computes the union of keys and return the result.
// Used for implementing "for (n in object) { }"
@@ -2559,6 +2621,45 @@
};
+class WeakFixedArray : public FixedArray {
+ public:
+ enum SearchForDuplicates { kAlwaysAdd, kAddIfNotFound };
+
+ // If |maybe_array| is not a WeakFixedArray, a fresh one will be allocated.
+ static Handle<WeakFixedArray> Add(
+ Handle<Object> maybe_array, Handle<HeapObject> value,
+ SearchForDuplicates search_for_duplicates = kAlwaysAdd);
+
+ void Remove(Handle<HeapObject> value);
+
+ inline Object* Get(int index) const;
+ inline int Length() const;
+
+ DECLARE_CAST(WeakFixedArray)
+
+ private:
+ static const int kLastUsedIndexIndex = 0;
+ static const int kFirstIndex = 1;
+
+ static Handle<WeakFixedArray> Allocate(
+ Isolate* isolate, int size, Handle<WeakFixedArray> initialize_from);
+
+ static void Set(Handle<WeakFixedArray> array, int index,
+ Handle<HeapObject> value);
+ inline void clear(int index);
+ inline bool IsEmptySlot(int index) const;
+
+ inline int last_used_index() const;
+ inline void set_last_used_index(int index);
+
+ // Disallow inherited setters.
+ void set(int index, Smi* value);
+ void set(int index, Object* value);
+ void set(int index, Object* value, WriteBarrierMode mode);
+ DISALLOW_IMPLICIT_CONSTRUCTORS(WeakFixedArray);
+};
+
+
// ConstantPoolArray describes a fixed-sized array containing constant pool
// entries.
//
@@ -2596,11 +2697,7 @@
//
class ConstantPoolArray: public HeapObject {
public:
- enum WeakObjectState {
- NO_WEAK_OBJECTS,
- WEAK_OBJECTS_IN_OPTIMIZED_CODE,
- WEAK_OBJECTS_IN_IC
- };
+ enum WeakObjectState { NO_WEAK_OBJECTS, WEAK_OBJECTS_IN_OPTIMIZED_CODE };
enum Type {
INT64 = 0,
@@ -2841,13 +2938,13 @@
// get_extended_section_header_offset().
static const int kExtendedInt64CountOffset = 0;
static const int kExtendedCodePtrCountOffset =
- kExtendedInt64CountOffset + kPointerSize;
+ kExtendedInt64CountOffset + kInt32Size;
static const int kExtendedHeapPtrCountOffset =
- kExtendedCodePtrCountOffset + kPointerSize;
+ kExtendedCodePtrCountOffset + kInt32Size;
static const int kExtendedInt32CountOffset =
- kExtendedHeapPtrCountOffset + kPointerSize;
+ kExtendedHeapPtrCountOffset + kInt32Size;
static const int kExtendedFirstOffset =
- kExtendedInt32CountOffset + kPointerSize;
+ kExtendedInt32CountOffset + kInt32Size;
// Dispatched behavior.
void ConstantPoolIterateBody(ObjectVisitor* v);
@@ -3034,9 +3131,12 @@
static const int kDescriptorValue = 2;
static const int kDescriptorSize = 3;
-#ifdef OBJECT_PRINT
+#if defined(DEBUG) || defined(OBJECT_PRINT)
+ // For our gdb macros, we should perhaps change these in the future.
+ void Print();
+
// Print all the descriptors.
- void PrintDescriptors(OStream& os); // NOLINT
+ void PrintDescriptors(std::ostream& os); // NOLINT
#endif
#ifdef DEBUG
@@ -3109,9 +3209,7 @@
// Transfer a complete descriptor from the src descriptor array to this
// descriptor array.
- void CopyFrom(int index,
- DescriptorArray* src,
- const WhitenessWitness&);
+ void CopyFrom(int index, DescriptorArray* src, const WhitenessWitness&);
inline void Set(int descriptor_number,
Descriptor* desc,
@@ -3126,12 +3224,9 @@
enum SearchMode { ALL_ENTRIES, VALID_ENTRIES };
-template<SearchMode search_mode, typename T>
-inline int LinearSearch(T* array, Name* name, int len, int valid_entries);
-
-
-template<SearchMode search_mode, typename T>
-inline int Search(T* array, Name* name, int valid_entries = 0);
+template <SearchMode search_mode, typename T>
+inline int Search(T* array, Name* name, int valid_entries = 0,
+ int* out_insertion_index = NULL);
// HashTable is a subclass of FixedArray that implements a hash table
@@ -3430,6 +3525,8 @@
uint16_t c1,
uint16_t c2);
+ static void EnsureCapacityForDeserialization(Isolate* isolate, int expected);
+
DECLARE_CAST(StringTable)
private:
@@ -3440,44 +3537,6 @@
};
-class MapCacheShape : public BaseShape<HashTableKey*> {
- public:
- static inline bool IsMatch(HashTableKey* key, Object* value) {
- return key->IsMatch(value);
- }
-
- static inline uint32_t Hash(HashTableKey* key) {
- return key->Hash();
- }
-
- static inline uint32_t HashForObject(HashTableKey* key, Object* object) {
- return key->HashForObject(object);
- }
-
- static inline Handle<Object> AsHandle(Isolate* isolate, HashTableKey* key);
-
- static const int kPrefixSize = 0;
- static const int kEntrySize = 2;
-};
-
-
-// MapCache.
-//
-// Maps keys that are a fixed array of unique names to a map.
-// Used for canonicalize maps for object literals.
-class MapCache: public HashTable<MapCache, MapCacheShape, HashTableKey*> {
- public:
- // Find cached value for a name key, otherwise return null.
- Object* Lookup(FixedArray* key);
- static Handle<MapCache> Put(
- Handle<MapCache> map_cache, Handle<FixedArray> key, Handle<Map> value);
- DECLARE_CAST(MapCache)
-
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(MapCache);
-};
-
-
template <typename Derived, typename Shape, typename Key>
class Dictionary: public HashTable<Derived, Shape, Key> {
protected:
@@ -3529,6 +3588,10 @@
// Returns the number of enumerable elements in the dictionary.
int NumberOfEnumElements();
+ // Returns true if the dictionary contains any elements that are non-writable,
+ // non-configurable, non-enumerable, or have getters/setters.
+ bool HasComplexElements();
+
enum SortMode { UNSORTED, SORTED };
// Copies keys to preallocated fixed array.
void CopyKeysTo(FixedArray* storage,
@@ -3560,7 +3623,7 @@
static Handle<Derived> EnsureCapacity(Handle<Derived> obj, int n, Key key);
#ifdef OBJECT_PRINT
- void Print(OStream& os); // NOLINT
+ void Print(std::ostream& os); // NOLINT
#endif
// Returns the key (slow).
Object* SlowReverseLookup(Object* value);
@@ -3580,6 +3643,11 @@
Handle<Object> value,
PropertyDetails details);
+ // Returns iteration indices array for the |dictionary|.
+ // Values are direct indices in the |HashTable| array.
+ static Handle<FixedArray> BuildIterationIndicesArray(
+ Handle<Derived> dictionary);
+
protected:
// Generic at put operation.
MUST_USE_RESULT static Handle<Derived> AtPut(
@@ -3596,7 +3664,9 @@
uint32_t hash);
// Generate new enumeration indices to avoid enumeration index overflow.
- static void GenerateNewEnumerationIndices(Handle<Derived> dictionary);
+ // Returns iteration indices array for the |dictionary|.
+ static Handle<FixedArray> GenerateNewEnumerationIndices(
+ Handle<Derived> dictionary);
static const int kMaxNumberKeyIndex = DerivedHashTable::kPrefixStartIndex;
static const int kNextEnumerationIndexIndex = kMaxNumberKeyIndex + 1;
};
@@ -3625,7 +3695,7 @@
// Copies enumerable keys to preallocated fixed array.
void CopyEnumKeysTo(FixedArray* storage);
- inline static void DoGenerateNewEnumerationIndices(
+ inline static Handle<FixedArray> DoGenerateNewEnumerationIndices(
Handle<NameDictionary> dictionary);
// Find entry for key, otherwise return kNotFound. Optimized version of
@@ -3904,6 +3974,33 @@
static const int kNotFound = -1;
static const int kMinCapacity = 4;
+ static const int kNumberOfBucketsIndex = 0;
+ static const int kNumberOfElementsIndex = kNumberOfBucketsIndex + 1;
+ static const int kNumberOfDeletedElementsIndex = kNumberOfElementsIndex + 1;
+ static const int kHashTableStartIndex = kNumberOfDeletedElementsIndex + 1;
+ static const int kNextTableIndex = kNumberOfElementsIndex;
+
+ static const int kNumberOfBucketsOffset =
+ kHeaderSize + kNumberOfBucketsIndex * kPointerSize;
+ static const int kNumberOfElementsOffset =
+ kHeaderSize + kNumberOfElementsIndex * kPointerSize;
+ static const int kNumberOfDeletedElementsOffset =
+ kHeaderSize + kNumberOfDeletedElementsIndex * kPointerSize;
+ static const int kHashTableStartOffset =
+ kHeaderSize + kHashTableStartIndex * kPointerSize;
+ static const int kNextTableOffset =
+ kHeaderSize + kNextTableIndex * kPointerSize;
+
+ static const int kEntrySize = entrysize + 1;
+ static const int kChainOffset = entrysize;
+
+ static const int kLoadFactor = 2;
+
+ // NumberOfDeletedElements is set to kClearedTableSentinel when
+ // the table is cleared, which allows iterator transitions to
+ // optimize that case.
+ static const int kClearedTableSentinel = -1;
+
private:
static Handle<Derived> Rehash(Handle<Derived> table, int new_capacity);
@@ -3945,18 +4042,8 @@
return set(kRemovedHolesIndex + index, Smi::FromInt(removed_index));
}
- static const int kNumberOfBucketsIndex = 0;
- static const int kNumberOfElementsIndex = kNumberOfBucketsIndex + 1;
- static const int kNumberOfDeletedElementsIndex = kNumberOfElementsIndex + 1;
- static const int kHashTableStartIndex = kNumberOfDeletedElementsIndex + 1;
-
- static const int kNextTableIndex = kNumberOfElementsIndex;
static const int kRemovedHolesIndex = kHashTableStartIndex;
- static const int kEntrySize = entrysize + 1;
- static const int kChainOffset = entrysize;
-
- static const int kLoadFactor = 2;
static const int kMaxCapacity =
(FixedArray::kMaxLength - kHashTableStartIndex)
/ (1 + (kEntrySize * kLoadFactor));
@@ -3995,7 +4082,6 @@
return get(EntryToIndex(entry) + kValueOffset);
}
- private:
static const int kValueOffset = 1;
};
@@ -4295,13 +4381,13 @@
};
// Properties of scopes.
- class ScopeTypeField: public BitField<ScopeType, 0, 3> {};
- class CallsEvalField: public BitField<bool, 3, 1> {};
- class StrictModeField: public BitField<StrictMode, 4, 1> {};
- class FunctionVariableField: public BitField<FunctionVariableInfo, 5, 2> {};
- class FunctionVariableMode: public BitField<VariableMode, 7, 3> {};
- class AsmModuleField : public BitField<bool, 10, 1> {};
- class AsmFunctionField : public BitField<bool, 11, 1> {};
+ class ScopeTypeField : public BitField<ScopeType, 0, 4> {};
+ class CallsEvalField : public BitField<bool, 4, 1> {};
+ class StrictModeField : public BitField<StrictMode, 5, 1> {};
+ class FunctionVariableField : public BitField<FunctionVariableInfo, 6, 2> {};
+ class FunctionVariableMode : public BitField<VariableMode, 8, 3> {};
+ class AsmModuleField : public BitField<bool, 11, 1> {};
+ class AsmFunctionField : public BitField<bool, 12, 1> {};
// BitFields representing the encoded information for context locals in the
// ContextLocalInfoEntries part.
@@ -4781,6 +4867,7 @@
#undef FIXED_TYPED_ARRAY_TRAITS
+
// DeoptimizationInputData is a fixed array used to hold the deoptimization
// data for code generated by the Hydrogen/Lithium compiler. It also
// contains information about functions that were inlined. If N different
@@ -4862,7 +4949,7 @@
DECLARE_CAST(DeoptimizationInputData)
#ifdef ENABLE_DISASSEMBLER
- void DeoptimizationInputDataPrint(OStream& os); // NOLINT
+ void DeoptimizationInputDataPrint(std::ostream& os); // NOLINT
#endif
private:
@@ -4907,7 +4994,7 @@
DECLARE_CAST(DeoptimizationOutputData)
#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
- void DeoptimizationOutputDataPrint(OStream& os); // NOLINT
+ void DeoptimizationOutputDataPrint(std::ostream& os); // NOLINT
#endif
};
@@ -4973,9 +5060,9 @@
// Printing
static const char* ICState2String(InlineCacheState state);
static const char* StubType2String(StubType type);
- static void PrintExtraICState(OStream& os, // NOLINT
+ static void PrintExtraICState(std::ostream& os, // NOLINT
Kind kind, ExtraICState extra);
- void Disassemble(const char* name, OStream& os); // NOLINT
+ void Disassemble(const char* name, std::ostream& os); // NOLINT
#endif // ENABLE_DISASSEMBLER
// [instruction_size]: Size of the native instructions
@@ -5054,12 +5141,7 @@
inline bool is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
inline bool is_keyed_stub();
inline bool is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
- inline bool is_weak_stub();
- inline void mark_as_weak_stub();
- inline bool is_invalidated_weak_stub();
- inline void mark_as_invalidated_weak_stub();
-
- inline bool CanBeWeakStub() {
+ inline bool embeds_maps_weakly() {
Kind k = kind();
return (k == LOAD_IC || k == STORE_IC || k == KEYED_LOAD_IC ||
k == KEYED_STORE_IC || k == COMPARE_NIL_IC) &&
@@ -5102,6 +5184,12 @@
inline bool is_compiled_optimizable();
inline void set_compiled_optimizable(bool value);
+ // [has_reloc_info_for_serialization]: For FUNCTION kind, tells if its
+ // reloc info includes runtime and external references to support
+ // serialization/deserialization.
+ inline bool has_reloc_info_for_serialization();
+ inline void set_has_reloc_info_for_serialization(bool value);
+
// [allow_osr_at_loop_nesting_level]: For FUNCTION kind, tells for
// how long the function has been marked for OSR and therefore which
// level of loop nesting we are willing to do on-stack replacement
@@ -5115,6 +5203,10 @@
inline void set_profiler_ticks(int ticks);
// [builtin_index]: For BUILTIN kind, tells which builtin index it has.
+ // For builtins, tells which builtin index it has.
+ // Note that builtins can have a code kind other than BUILTIN, which means
+ // that for arbitrary code objects, this index value may be random garbage.
+ // To verify in that case, compare the code object to the indexed builtin.
inline int builtin_index();
inline void set_builtin_index(int id);
@@ -5307,6 +5399,7 @@
// compilation stub.
static void MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate);
static void MarkCodeAsExecuted(byte* sequence, Isolate* isolate);
+ void MakeYoung(Isolate* isolate);
void MakeOlder(MarkingParity);
static bool IsYoungSequence(Isolate* isolate, byte* sequence);
bool IsOld();
@@ -5325,26 +5418,25 @@
void VerifyEmbeddedObjectsDependency();
#endif
- inline bool CanContainWeakObjects() {
- return is_optimized_code() || is_weak_stub();
- }
+#ifdef DEBUG
+ void VerifyEmbeddedObjectsInFullCode();
+#endif // DEBUG
+
+ inline bool CanContainWeakObjects() { return is_optimized_code(); }
inline bool IsWeakObject(Object* object) {
return (is_optimized_code() && !is_turbofanned() &&
- IsWeakObjectInOptimizedCode(object)) ||
- (is_weak_stub() && IsWeakObjectInIC(object));
+ IsWeakObjectInOptimizedCode(object));
}
static inline bool IsWeakObjectInOptimizedCode(Object* object);
- static inline bool IsWeakObjectInIC(Object* object);
// Max loop nesting marker used to postpose OSR. We don't take loop
// nesting that is deeper than 5 levels into account.
static const int kMaxLoopNestingMarker = 6;
// Layout description.
- static const int kInstructionSizeOffset = HeapObject::kHeaderSize;
- static const int kRelocationInfoOffset = kInstructionSizeOffset + kIntSize;
+ static const int kRelocationInfoOffset = HeapObject::kHeaderSize;
static const int kHandlerTableOffset = kRelocationInfoOffset + kPointerSize;
static const int kDeoptimizationDataOffset =
kHandlerTableOffset + kPointerSize;
@@ -5353,22 +5445,24 @@
kDeoptimizationDataOffset + kPointerSize;
static const int kNextCodeLinkOffset = kTypeFeedbackInfoOffset + kPointerSize;
static const int kGCMetadataOffset = kNextCodeLinkOffset + kPointerSize;
- static const int kICAgeOffset =
- kGCMetadataOffset + kPointerSize;
+ static const int kInstructionSizeOffset = kGCMetadataOffset + kPointerSize;
+ static const int kICAgeOffset = kInstructionSizeOffset + kIntSize;
static const int kFlagsOffset = kICAgeOffset + kIntSize;
static const int kKindSpecificFlags1Offset = kFlagsOffset + kIntSize;
static const int kKindSpecificFlags2Offset =
kKindSpecificFlags1Offset + kIntSize;
// Note: We might be able to squeeze this into the flags above.
static const int kPrologueOffset = kKindSpecificFlags2Offset + kIntSize;
- static const int kConstantPoolOffset = kPrologueOffset + kPointerSize;
+ static const int kConstantPoolOffset = kPrologueOffset + kIntSize;
- static const int kHeaderPaddingStart = kConstantPoolOffset + kIntSize;
+ static const int kHeaderPaddingStart = kConstantPoolOffset + kPointerSize;
// Add padding to align the instruction start following right after
// the Code object header.
static const int kHeaderSize =
(kHeaderPaddingStart + kCodeAlignmentMask) & ~kCodeAlignmentMask;
+ // Ensure that the slot for the constant pool pointer is aligned.
+ STATIC_ASSERT((kConstantPoolOffset & kPointerAlignmentMask) == 0);
// Byte offsets within kKindSpecificFlags1Offset.
static const int kOptimizableOffset = kKindSpecificFlags1Offset;
@@ -5378,6 +5472,8 @@
public BitField<bool, 0, 1> {}; // NOLINT
class FullCodeFlagsHasDebugBreakSlotsField: public BitField<bool, 1, 1> {};
class FullCodeFlagsIsCompiledOptimizable: public BitField<bool, 2, 1> {};
+ class FullCodeFlagsHasRelocInfoForSerialization
+ : public BitField<bool, 3, 1> {};
static const int kProfilerTicksOffset = kFullCodeFlags + 1;
@@ -5395,9 +5491,7 @@
static const int kHasFunctionCacheBit =
kStackSlotsFirstBit + kStackSlotsBitCount;
static const int kMarkedForDeoptimizationBit = kHasFunctionCacheBit + 1;
- static const int kWeakStubBit = kMarkedForDeoptimizationBit + 1;
- static const int kInvalidatedWeakStubBit = kWeakStubBit + 1;
- static const int kIsTurbofannedBit = kInvalidatedWeakStubBit + 1;
+ static const int kIsTurbofannedBit = kMarkedForDeoptimizationBit + 1;
STATIC_ASSERT(kStackSlotsFirstBit + kStackSlotsBitCount <= 32);
STATIC_ASSERT(kIsTurbofannedBit + 1 <= 32);
@@ -5408,9 +5502,6 @@
}; // NOLINT
class MarkedForDeoptimizationField
: public BitField<bool, kMarkedForDeoptimizationBit, 1> {}; // NOLINT
- class WeakStubField : public BitField<bool, kWeakStubBit, 1> {}; // NOLINT
- class InvalidatedWeakStubField
- : public BitField<bool, kInvalidatedWeakStubBit, 1> {}; // NOLINT
class IsTurbofannedField : public BitField<bool, kIsTurbofannedBit, 1> {
}; // NOLINT
@@ -5492,11 +5583,6 @@
class DependentCode: public FixedArray {
public:
enum DependencyGroup {
- // Group of IC stubs that weakly embed this map and depend on being
- // invalidated when the map is garbage collected. Dependent IC stubs form
- // a linked list. This group stores only the head of the list. This means
- // that the number_of_entries(kWeakICGroup) is 0 or 1.
- kWeakICGroup,
// Group of code that weakly embed this map and depend on being
// deoptimized when the map is garbage collected.
kWeakCodeGroup,
@@ -5557,7 +5643,6 @@
bool MarkCodeForDeoptimization(Isolate* isolate,
DependentCode::DependencyGroup group);
- void AddToDependentICList(Handle<Code> stub);
// The following low-level accessors should only be used by this class
// and the mark compact collector.
@@ -5637,15 +5722,20 @@
class OwnsDescriptors : public BitField<bool, 21, 1> {};
class HasInstanceCallHandler : public BitField<bool, 22, 1> {};
class Deprecated : public BitField<bool, 23, 1> {};
- class IsFrozen : public BitField<bool, 24, 1> {};
- class IsUnstable : public BitField<bool, 25, 1> {};
- class IsMigrationTarget : public BitField<bool, 26, 1> {};
- class DoneInobjectSlackTracking : public BitField<bool, 27, 1> {};
- // Bit 28 is free.
+ class IsUnstable : public BitField<bool, 24, 1> {};
+ class IsMigrationTarget : public BitField<bool, 25, 1> {};
+ // Bits 26 and 27 are free.
// Keep this bit field at the very end for better code in
// Builtins::kJSConstructStubGeneric stub.
- class ConstructionCount: public BitField<int, 29, 3> {};
+ // This counter is used for in-object slack tracking and for map aging.
+ // The in-object slack tracking is considered enabled when the counter is
+ // in the range [kSlackTrackingCounterStart, kSlackTrackingCounterEnd].
+ class Counter : public BitField<int, 28, 4> {};
+ static const int kSlackTrackingCounterStart = 14;
+ static const int kSlackTrackingCounterEnd = 8;
+ static const int kRetainingCounterStart = kSlackTrackingCounterEnd - 1;
+ static const int kRetainingCounterEnd = 0;
// Tells whether the object in the prototype property will be used
// for instances created from this function. If the prototype
@@ -5718,7 +5808,7 @@
inline bool is_prototype_map();
inline void set_elements_kind(ElementsKind elements_kind) {
- DCHECK(elements_kind < kElementsKindCount);
+ DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
DCHECK(this->elements_kind() == elements_kind);
@@ -5783,7 +5873,9 @@
inline Map* elements_transition_map();
inline Map* GetTransition(int transition_index);
- inline int SearchTransition(Name* name);
+ inline int SearchSpecialTransition(Symbol* name);
+ inline int SearchTransition(PropertyKind kind, Name* name,
+ PropertyAttributes attributes);
inline FixedArrayBase* GetInitialElements();
DECL_ACCESSORS(transitions, TransitionArray)
@@ -5813,8 +5905,8 @@
Handle<HeapType> type1,
Handle<HeapType> type2,
Isolate* isolate);
- static void GeneralizeFieldType(Handle<Map> map,
- int modify_index,
+ static void GeneralizeFieldType(Handle<Map> map, int modify_index,
+ Representation new_representation,
Handle<HeapType> new_field_type);
static Handle<Map> GeneralizeRepresentation(
Handle<Map> map,
@@ -5838,7 +5930,8 @@
int descriptor_number,
Handle<Object> value);
- static Handle<Map> Normalize(Handle<Map> map, PropertyNormalizationMode mode);
+ static Handle<Map> Normalize(Handle<Map> map, PropertyNormalizationMode mode,
+ const char* reason);
// Returns the constructor name (the name (possibly, inferred name) of the
// function that was used to instantiate the object).
@@ -5861,13 +5954,33 @@
// [prototype]: implicit prototype object.
DECL_ACCESSORS(prototype, Object)
+ // TODO(jkummerow): make set_prototype private.
+ void SetPrototype(Handle<Object> prototype,
+ PrototypeOptimizationMode proto_mode = FAST_PROTOTYPE);
+ bool ShouldRegisterAsPrototypeUser(Handle<JSObject> prototype);
+ bool CanUseOptimizationsBasedOnPrototypeRegistry();
// [constructor]: points back to the function responsible for this map.
DECL_ACCESSORS(constructor, Object)
// [instance descriptors]: describes the object.
DECL_ACCESSORS(instance_descriptors, DescriptorArray)
- inline void InitializeDescriptors(DescriptorArray* descriptors);
+
+ // [layout descriptor]: describes the object layout.
+ DECL_ACCESSORS(layout_descriptor, LayoutDescriptor)
+ // |layout descriptor| accessor which can be used from GC.
+ inline LayoutDescriptor* layout_descriptor_gc_safe();
+ inline bool HasFastPointerLayout() const;
+
+ // |layout descriptor| accessor that is safe to call even when
+ // FLAG_unbox_double_fields is disabled (in this case Map does not contain
+ // |layout_descriptor| field at all).
+ inline LayoutDescriptor* GetLayoutDescriptor();
+
+ inline void UpdateDescriptors(DescriptorArray* descriptors,
+ LayoutDescriptor* layout_descriptor);
+ inline void InitializeDescriptors(DescriptorArray* descriptors,
+ LayoutDescriptor* layout_descriptor);
// [stub cache]: contains stubs compiled for this map.
DECL_ACCESSORS(code_cache, Object)
@@ -5921,8 +6034,8 @@
Name* name,
LookupResult* result);
- inline void LookupTransition(JSObject* holder,
- Name* name,
+ inline void LookupTransition(JSObject* holder, Name* name,
+ PropertyAttributes attributes,
LookupResult* result);
inline PropertyDetails GetLastDescriptorDetails();
@@ -5966,16 +6079,12 @@
inline void set_owns_descriptors(bool owns_descriptors);
inline bool has_instance_call_handler();
inline void set_has_instance_call_handler();
- inline void freeze();
- inline bool is_frozen();
inline void mark_unstable();
inline bool is_stable();
inline void set_migration_target(bool value);
inline bool is_migration_target();
- inline void set_done_inobject_slack_tracking(bool value);
- inline bool done_inobject_slack_tracking();
- inline void set_construction_count(int value);
- inline int construction_count();
+ inline void set_counter(int value);
+ inline int counter();
inline void deprecate();
inline bool is_deprecated();
inline bool CanBeDeprecated();
@@ -6027,7 +6136,10 @@
static Handle<Map> CopyForObserved(Handle<Map> map);
- static Handle<Map> CopyForFreeze(Handle<Map> map);
+ static Handle<Map> CopyForPreventExtensions(Handle<Map> map,
+ PropertyAttributes attrs_to_add,
+ Handle<Symbol> transition_marker,
+ const char* reason);
// Maximal number of fast properties. Used to restrict the number of map
// transitions to avoid an explosion in the number of maps for objects used as
// dictionaries.
@@ -6047,7 +6159,7 @@
// Returns a copy of the map, with all transitions dropped from the
// instance descriptors.
- static Handle<Map> Copy(Handle<Map> map);
+ static Handle<Map> Copy(Handle<Map> map, const char* reason);
static Handle<Map> Create(Isolate* isolate, int inobject_properties);
// Returns the next free property index (only valid for FAST MODE).
@@ -6082,6 +6194,8 @@
static void AppendCallbackDescriptors(Handle<Map> map,
Handle<Object> descriptors);
+ static inline int SlackForArraySize(int old_size, int size_limit);
+
static void EnsureDescriptorSlack(Handle<Map> map, int slack);
// Returns the found code or undefined if absent.
@@ -6122,6 +6236,7 @@
bool IsJSObjectMap() {
return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
+ bool IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
bool IsJSProxyMap() {
InstanceType type = instance_type();
return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
@@ -6146,11 +6261,11 @@
static void AddDependentCode(Handle<Map> map,
DependentCode::DependencyGroup group,
Handle<Code> code);
- static void AddDependentIC(Handle<Map> map,
- Handle<Code> stub);
bool IsMapInArrayPrototypeChain();
+ static Handle<WeakCell> WeakCellForMap(Handle<Map> map);
+
// Dispatched behavior.
DECLARE_PRINTER(Map)
DECLARE_VERIFIER(Map)
@@ -6173,10 +6288,11 @@
// the original map. That way we can transition to the same map if the same
// prototype is set, rather than creating a new map every time. The
// transitions are in the form of a map where the keys are prototype objects
- // and the values are the maps the are transitioned to.
+ // and the values are the maps they transition to.
static const int kMaxCachedPrototypeTransitions = 256;
static Handle<Map> TransitionToPrototype(Handle<Map> map,
- Handle<Object> prototype);
+ Handle<Object> prototype,
+ PrototypeOptimizationMode mode);
static const int kMaxPreAllocatedPropertyFields = 255;
@@ -6194,7 +6310,13 @@
kConstructorOffset + kPointerSize;
static const int kDescriptorsOffset =
kTransitionsOrBackPointerOffset + kPointerSize;
+#if V8_DOUBLE_FIELDS_UNBOXING
+ static const int kLayoutDecriptorOffset = kDescriptorsOffset + kPointerSize;
+ static const int kCodeCacheOffset = kLayoutDecriptorOffset + kPointerSize;
+#else
+ static const int kLayoutDecriptorOffset = 1; // Must not be ever accessed.
static const int kCodeCacheOffset = kDescriptorsOffset + kPointerSize;
+#endif
static const int kDependentCodeOffset = kCodeCacheOffset + kPointerSize;
static const int kSize = kDependentCodeOffset + kPointerSize;
@@ -6272,6 +6394,18 @@
// The "shared" flags of both this map and |other| are ignored.
bool EquivalentToForNormalization(Map* other, PropertyNormalizationMode mode);
+ // Returns true if given field is unboxed double.
+ inline bool IsUnboxedDoubleField(FieldIndex index);
+
+#if TRACE_MAPS
+ static void TraceTransition(const char* what, Map* from, Map* to, Name* name);
+ static void TraceAllTransitions(Map* map);
+#endif
+
+ static inline Handle<Map> CopyInstallDescriptorsForTesting(
+ Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
+ Handle<LayoutDescriptor> layout_descriptor);
+
private:
static void ConnectElementsTransition(Handle<Map> parent, Handle<Map> child);
static void ConnectTransition(Handle<Map> parent, Handle<Map> child,
@@ -6283,18 +6417,17 @@
Handle<DescriptorArray> descriptors,
Descriptor* descriptor);
static Handle<Map> CopyInstallDescriptors(
- Handle<Map> map,
- int new_descriptor,
- Handle<DescriptorArray> descriptors);
+ Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
+ Handle<LayoutDescriptor> layout_descriptor);
static Handle<Map> CopyAddDescriptor(Handle<Map> map,
Descriptor* descriptor,
TransitionFlag flag);
static Handle<Map> CopyReplaceDescriptors(
- Handle<Map> map,
- Handle<DescriptorArray> descriptors,
- TransitionFlag flag,
- MaybeHandle<Name> maybe_name,
- SimpleTransitionFlag simple_flag = FULL_TRANSITION);
+ Handle<Map> map, Handle<DescriptorArray> descriptors,
+ Handle<LayoutDescriptor> layout_descriptor, TransitionFlag flag,
+ MaybeHandle<Name> maybe_name, const char* reason,
+ SimpleTransitionFlag simple_flag);
+
static Handle<Map> CopyReplaceDescriptor(Handle<Map> map,
Handle<DescriptorArray> descriptors,
Descriptor* descriptor,
@@ -6322,11 +6455,15 @@
void ZapTransitions();
void DeprecateTransitionTree();
- void DeprecateTarget(Name* key, DescriptorArray* new_descriptors);
+ bool DeprecateTarget(PropertyKind kind, Name* key,
+ PropertyAttributes attributes,
+ DescriptorArray* new_descriptors,
+ LayoutDescriptor* new_layout_descriptor);
Map* FindLastMatchMap(int verbatim, int length, DescriptorArray* descriptors);
void UpdateFieldType(int descriptor_number, Handle<Name> name,
+ Representation new_representation,
Handle<HeapType> new_type);
void PrintGeneralization(FILE* file,
@@ -6428,8 +6565,8 @@
// [context_data]: context data for the context this script was compiled in.
DECL_ACCESSORS(context_data, Object)
- // [wrapper]: the wrapper cache.
- DECL_ACCESSORS(wrapper, Foreign)
+ // [wrapper]: the wrapper cache. This is either undefined or a WeakCell.
+ DECL_ACCESSORS(wrapper, HeapObject)
// [type]: the script type.
DECL_ACCESSORS(type, Smi)
@@ -6491,7 +6628,6 @@
// Get the JS object wrapping the given script; create it if none exists.
static Handle<JSObject> GetWrapper(Handle<Script> script);
- void ClearWrapperCache();
// Dispatched behavior.
DECLARE_PRINTER(Script)
@@ -6543,9 +6679,11 @@
V(Array.prototype, pop, ArrayPop) \
V(Array.prototype, shift, ArrayShift) \
V(Function.prototype, apply, FunctionApply) \
+ V(Function.prototype, call, FunctionCall) \
V(String.prototype, charCodeAt, StringCharCodeAt) \
V(String.prototype, charAt, StringCharAt) \
V(String, fromCharCode, StringFromCharCode) \
+ V(Math, random, MathRandom) \
V(Math, floor, MathFloor) \
V(Math, round, MathRound) \
V(Math, ceil, MathCeil) \
@@ -6556,6 +6694,13 @@
V(Math, pow, MathPow) \
V(Math, max, MathMax) \
V(Math, min, MathMin) \
+ V(Math, cos, MathCos) \
+ V(Math, sin, MathSin) \
+ V(Math, tan, MathTan) \
+ V(Math, acos, MathAcos) \
+ V(Math, asin, MathAsin) \
+ V(Math, atan, MathAtan) \
+ V(Math, atan2, MathAtan2) \
V(Math, imul, MathImul) \
V(Math, clz32, MathClz32) \
V(Math, fround, MathFround)
@@ -6657,6 +6802,13 @@
// available.
DECL_ACCESSORS(feedback_vector, TypeFeedbackVector)
+#if TRACE_MAPS
+ // [unique_id] - For --trace-maps purposes, an identifier that's persistent
+ // even if the GC moves this SharedFunctionInfo.
+ inline int unique_id() const;
+ inline void set_unique_id(int value);
+#endif
+
// [instance class name]: class name for instances.
DECL_ACCESSORS(instance_class_name, Object)
@@ -6759,6 +6911,13 @@
// False if the function definitely does not allocate an arguments object.
DECL_BOOLEAN_ACCESSORS(uses_arguments)
+ // Indicates that this function uses a super property.
+ // This is needed to set up the [[HomeObject]] on the function instance.
+ DECL_BOOLEAN_ACCESSORS(uses_super_property)
+
+ // Indicates that this function uses the super constructor.
+ DECL_BOOLEAN_ACCESSORS(uses_super_constructor_call)
+
// True if the function has any duplicated parameter names.
DECL_BOOLEAN_ACCESSORS(has_duplicate_parameters)
@@ -6803,9 +6962,15 @@
// Indicates that this function is a concise method.
DECL_BOOLEAN_ACCESSORS(is_concise_method)
+ // Indicates that this function is a default constructor.
+ DECL_BOOLEAN_ACCESSORS(is_default_constructor)
+
// Indicates that this function is an asm function.
DECL_BOOLEAN_ACCESSORS(asm_function)
+ // Indicates that the shared function info is deserialized from cache.
+ DECL_BOOLEAN_ACCESSORS(deserialized)
+
inline FunctionKind kind();
inline void set_kind(FunctionKind kind);
@@ -6820,7 +6985,7 @@
// shared function info.
void DisableOptimization(BailoutReason reason);
- inline BailoutReason DisableOptimizationReason();
+ inline BailoutReason disable_optimization_reason();
// Lookup the bailout ID and DCHECK that it exists in the non-optimized
// code, returns whether it asserted (i.e., always true if assertions are
@@ -6855,7 +7020,7 @@
inline void set_opt_count_and_bailout_reason(int value);
inline int opt_count_and_bailout_reason() const;
- void set_bailout_reason(BailoutReason reason) {
+ void set_disable_optimization_reason(BailoutReason reason) {
set_opt_count_and_bailout_reason(
DisabledOptimizationReasonBits::update(opt_count_and_bailout_reason(),
reason));
@@ -6900,10 +7065,16 @@
static const int kInferredNameOffset = kDebugInfoOffset + kPointerSize;
static const int kFeedbackVectorOffset =
kInferredNameOffset + kPointerSize;
+#if TRACE_MAPS
+ static const int kUniqueIdOffset = kFeedbackVectorOffset + kPointerSize;
+ static const int kLastPointerFieldOffset = kUniqueIdOffset;
+#else
+ static const int kLastPointerFieldOffset = kFeedbackVectorOffset;
+#endif
+
#if V8_HOST_ARCH_32_BIT
// Smi fields.
- static const int kLengthOffset =
- kFeedbackVectorOffset + kPointerSize;
+ static const int kLengthOffset = kLastPointerFieldOffset + kPointerSize;
static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize;
static const int kExpectedNofPropertiesOffset =
kFormalParameterCountOffset + kPointerSize;
@@ -6934,12 +7105,12 @@
// garbage collections.
// To avoid wasting space on 64-bit architectures we use
// the following trick: we group integer fields into pairs
- // First integer in each pair is shifted left by 1.
- // By doing this we guarantee that LSB of each kPointerSize aligned
- // word is not set and thus this word cannot be treated as pointer
- // to HeapObject during old space traversal.
- static const int kLengthOffset =
- kFeedbackVectorOffset + kPointerSize;
+// The least significant integer in each pair is shifted left by 1.
+// By doing this we guarantee that LSB of each kPointerSize aligned
+// word is not set and thus this word cannot be treated as pointer
+// to HeapObject during old space traversal.
+#if V8_TARGET_LITTLE_ENDIAN
+ static const int kLengthOffset = kLastPointerFieldOffset + kPointerSize;
static const int kFormalParameterCountOffset =
kLengthOffset + kIntSize;
@@ -6971,12 +7142,42 @@
// Total size.
static const int kSize = kProfilerTicksOffset + kIntSize;
-#endif
+#elif V8_TARGET_BIG_ENDIAN
+ static const int kFormalParameterCountOffset =
+ kLastPointerFieldOffset + kPointerSize;
+ static const int kLengthOffset = kFormalParameterCountOffset + kIntSize;
+
+ static const int kNumLiteralsOffset = kLengthOffset + kIntSize;
+ static const int kExpectedNofPropertiesOffset = kNumLiteralsOffset + kIntSize;
+
+ static const int kStartPositionAndTypeOffset =
+ kExpectedNofPropertiesOffset + kIntSize;
+ static const int kEndPositionOffset = kStartPositionAndTypeOffset + kIntSize;
+
+ static const int kCompilerHintsOffset = kEndPositionOffset + kIntSize;
+ static const int kFunctionTokenPositionOffset =
+ kCompilerHintsOffset + kIntSize;
+
+ static const int kCountersOffset = kFunctionTokenPositionOffset + kIntSize;
+ static const int kOptCountAndBailoutReasonOffset = kCountersOffset + kIntSize;
+
+ static const int kProfilerTicksOffset =
+ kOptCountAndBailoutReasonOffset + kIntSize;
+ static const int kAstNodeCountOffset = kProfilerTicksOffset + kIntSize;
+
+ // Total size.
+ static const int kSize = kAstNodeCountOffset + kIntSize;
+
+#else
+#error Unknown byte ordering
+#endif // Big endian
+#endif // 64-bit
+
static const int kAlignedSize = POINTER_SIZE_ALIGN(kSize);
typedef FixedBodyDescriptor<kNameOffset,
- kFeedbackVectorOffset + kPointerSize,
+ kLastPointerFieldOffset + kPointerSize,
kSize> BodyDescriptor;
// Bit positions in start_position_and_type.
@@ -6994,6 +7195,8 @@
kOptimizationDisabled,
kStrictModeFunction,
kUsesArguments,
+ kUsesSuperProperty,
+ kUsesSuperConstructorCall,
kHasDuplicateParameters,
kNative,
kInlineBuiltin,
@@ -7006,11 +7209,13 @@
kIsArrow,
kIsGenerator,
kIsConciseMethod,
+ kIsDefaultConstructor,
kIsAsmFunction,
+ kDeserialized,
kCompilerHintsCount // Pseudo entry
};
- class FunctionKindBits : public BitField<FunctionKind, kIsArrow, 3> {};
+ class FunctionKindBits : public BitField<FunctionKind, kIsArrow, 4> {};
class DeoptCountBits : public BitField<int, 0, 4> {};
class OptReenableTriesBits : public BitField<int, 4, 18> {};
@@ -7073,7 +7278,7 @@
};
-OStream& operator<<(OStream& os, const SourceCodeOf& v);
+std::ostream& operator<<(std::ostream& os, const SourceCodeOf& v);
class JSGeneratorObject: public JSObject {
@@ -7215,8 +7420,7 @@
// Mark this function for lazy recompilation. The function will be
// recompiled the next time it is executed.
void MarkForOptimization();
- void MarkForConcurrentOptimization();
- void MarkInOptimizationQueue();
+ void AttemptConcurrentOptimization();
// Tells whether or not the function is already marked for lazy
// recompilation.
@@ -7236,13 +7440,12 @@
// Here is the algorithm to reclaim the unused inobject space:
// - Detect the first constructor call for this JSFunction.
// When it happens enter the "in progress" state: initialize construction
- // counter in the initial_map and set the |done_inobject_slack_tracking|
- // flag.
+ // counter in the initial_map.
// - While the tracking is in progress create objects filled with
// one_pointer_filler_map instead of undefined_value. This way they can be
// resized quickly and safely.
- // - Once enough (kGenerousAllocationCount) objects have been created
- // compute the 'slack' (traverse the map transition tree starting from the
+ // - Once enough objects have been created compute the 'slack'
+ // (traverse the map transition tree starting from the
// initial_map and find the lowest value of unused_property_fields).
// - Traverse the transition tree again and decrease the instance size
// of every map. Existing objects will resize automatically (they are
@@ -7255,23 +7458,17 @@
// Important: inobject slack tracking is not attempted during the snapshot
// creation.
- static const int kGenerousAllocationCount = Map::ConstructionCount::kMax;
- static const int kFinishSlackTracking = 1;
- static const int kNoSlackTracking = 0;
-
// True if the initial_map is set and the object constructions countdown
// counter is not zero.
+ static const int kGenerousAllocationCount =
+ Map::kSlackTrackingCounterStart - Map::kSlackTrackingCounterEnd + 1;
inline bool IsInobjectSlackTrackingInProgress();
// Starts the tracking.
// Initializes object constructions countdown counter in the initial map.
- // IsInobjectSlackTrackingInProgress is normally true after this call,
- // except when tracking have not been started (e.g. the map has no unused
- // properties or the snapshot is being built).
void StartInobjectSlackTracking();
// Completes the tracking.
- // IsInobjectSlackTrackingInProgress is false after this call.
void CompleteInobjectSlackTracking();
// [literals_or_bindings]: Fixed array holding either
@@ -7443,19 +7640,18 @@
// [native context]: the natives corresponding to this global object.
DECL_ACCESSORS(native_context, Context)
- // [global context]: the most recent (i.e. innermost) global context.
- DECL_ACCESSORS(global_context, Context)
-
// [global proxy]: the global proxy object of the context
DECL_ACCESSORS(global_proxy, JSObject)
DECLARE_CAST(GlobalObject)
+ static void InvalidatePropertyCell(Handle<GlobalObject> object,
+ Handle<Name> name);
+
// Layout description.
static const int kBuiltinsOffset = JSObject::kHeaderSize;
static const int kNativeContextOffset = kBuiltinsOffset + kPointerSize;
- static const int kGlobalContextOffset = kNativeContextOffset + kPointerSize;
- static const int kGlobalProxyOffset = kGlobalContextOffset + kPointerSize;
+ static const int kGlobalProxyOffset = kNativeContextOffset + kPointerSize;
static const int kHeaderSize = kGlobalProxyOffset + kPointerSize;
private:
@@ -7815,12 +8011,11 @@
FixedArray::kHeaderSize + kIrregexpCaptureCountIndex * kPointerSize;
// In-object fields.
- static const int kSourceFieldIndex = 0;
- static const int kGlobalFieldIndex = 1;
- static const int kIgnoreCaseFieldIndex = 2;
- static const int kMultilineFieldIndex = 3;
- static const int kLastIndexFieldIndex = 4;
- static const int kInObjectFieldCount = 5;
+ static const int kGlobalFieldIndex = 0;
+ static const int kIgnoreCaseFieldIndex = 1;
+ static const int kMultilineFieldIndex = 2;
+ static const int kLastIndexFieldIndex = 3;
+ static const int kInObjectFieldCount = 4;
// The uninitialized value for a regexp code object.
static const int kUninitializedValue = -1;
@@ -7857,6 +8052,17 @@
};
+// This cache is used in two different variants. For regexp caching, it simply
+// maps identifying info of the regexp to the cached regexp object. Scripts and
+// eval code only gets cached after a second probe for the code object. To do
+// so, on first "put" only a hash identifying the source is entered into the
+// cache, mapping it to a lifetime count of the hash. On each call to Age all
+// such lifetimes get reduced, and removed once they reach zero. If a second put
+// is called while such a hash is live in the cache, the hash gets replaced by
+// an actual cache entry. Age also removes stale live entries from the cache.
+// Such entries are identified by SharedFunctionInfos pointing to either the
+// recompilation stub, or to "old" code. This avoids memory leaks due to
+// premature caching of scripts and eval strings that are never needed later.
class CompilationCacheTable: public HashTable<CompilationCacheTable,
CompilationCacheShape,
HashTableKey*> {
@@ -7878,6 +8084,8 @@
Handle<CompilationCacheTable> cache, Handle<String> src,
JSRegExp::Flags flags, Handle<FixedArray> value);
void Remove(Object* value);
+ void Age();
+ static const int kHashGenerations = 10;
DECLARE_CAST(CompilationCacheTable)
@@ -7890,6 +8098,7 @@
public:
DECL_ACCESSORS(default_cache, FixedArray)
DECL_ACCESSORS(normal_type_cache, Object)
+ DECL_ACCESSORS(weak_cell_cache, Object)
// Add the code object to the cache.
static void Update(
@@ -7917,7 +8126,8 @@
static const int kDefaultCacheOffset = HeapObject::kHeaderSize;
static const int kNormalTypeCacheOffset =
kDefaultCacheOffset + kPointerSize;
- static const int kSize = kNormalTypeCacheOffset + kPointerSize;
+ static const int kWeakCellCacheOffset = kNormalTypeCacheOffset + kPointerSize;
+ static const int kSize = kWeakCellCacheOffset + kPointerSize;
private:
static void UpdateDefaultCache(
@@ -8048,7 +8258,6 @@
inline void set_inlined_type_change_checksum(int checksum);
inline bool matches_inlined_type_change_checksum(int checksum);
-
DECLARE_CAST(TypeFeedbackInfo)
// Dispatched behavior.
@@ -8231,14 +8440,11 @@
static void DigestTransitionFeedback(Handle<AllocationSite> site,
ElementsKind to_kind);
- enum Reason {
- TENURING,
- TRANSITIONS
- };
+ static void RegisterForDeoptOnTenureChange(Handle<AllocationSite> site,
+ CompilationInfo* info);
- static void AddDependentCompilationInfo(Handle<AllocationSite> site,
- Reason reason,
- CompilationInfo* info);
+ static void RegisterForDeoptOnTransitionChange(Handle<AllocationSite> site,
+ CompilationInfo* info);
DECLARE_PRINTER(AllocationSite)
DECLARE_VERIFIER(AllocationSite)
@@ -8270,7 +8476,10 @@
kSize> BodyDescriptor;
private:
- inline DependentCode::DependencyGroup ToDependencyGroup(Reason reason);
+ static void AddDependentCompilationInfo(Handle<AllocationSite> site,
+ DependentCode::DependencyGroup group,
+ CompilationInfo* info);
+
bool PretenuringDecisionMade() {
return pretenure_decision() != kUndecided;
}
@@ -8363,6 +8572,11 @@
// Reusable parts of the hashing algorithm.
INLINE(static uint32_t AddCharacterCore(uint32_t running_hash, uint16_t c));
INLINE(static uint32_t GetHashCore(uint32_t running_hash));
+ INLINE(static uint32_t ComputeRunningHash(uint32_t running_hash,
+ const uc16* chars, int length));
+ INLINE(static uint32_t ComputeRunningHashOneByte(uint32_t running_hash,
+ const char* chars,
+ int length));
protected:
// Returns the value to store in the hash field of a string with
@@ -8399,6 +8613,7 @@
private:
inline IteratingStringHasher(int len, uint32_t seed)
: StringHasher(len, seed) {}
+ void VisitConsString(ConsString* cons_string);
DISALLOW_COPY_AND_ASSIGN(IteratingStringHasher);
};
@@ -8479,10 +8694,19 @@
DECLARE_CAST(Name)
DECLARE_PRINTER(Name)
+#if TRACE_MAPS
+ void NameShortPrint();
+ int NameShortPrint(Vector<char> str);
+#endif
// Layout description.
- static const int kHashFieldOffset = HeapObject::kHeaderSize;
- static const int kSize = kHashFieldOffset + kPointerSize;
+ static const int kHashFieldSlot = HeapObject::kHeaderSize;
+#if V8_TARGET_LITTLE_ENDIAN || !V8_HOST_ARCH_64_BIT
+ static const int kHashFieldOffset = kHashFieldSlot;
+#else
+ static const int kHashFieldOffset = kHashFieldSlot + kIntSize;
+#endif
+ static const int kSize = kHashFieldSlot + kPointerSize;
// Mask constant for checking if a name has a computed hash code
// and if it is a string that is an array index. The least significant bit
@@ -8568,10 +8792,18 @@
typedef FixedBodyDescriptor<kNameOffset, kFlagsOffset, kSize> BodyDescriptor;
+ void SymbolShortPrint(std::ostream& os);
+
private:
static const int kPrivateBit = 0;
static const int kOwnBit = 1;
+ const char* PrivateSymbolToName() const;
+
+#if TRACE_MAPS
+ friend class Name; // For PrivateSymbolToName.
+#endif
+
DISALLOW_IMPLICIT_CONSTRUCTORS(Symbol);
};
@@ -8672,6 +8904,9 @@
friend class String;
};
+ template <typename Char>
+ INLINE(Vector<const Char> GetCharVector());
+
// Get and set the length of the string.
inline int length() const;
inline void set_length(int value);
@@ -8787,8 +9022,8 @@
// Dispatched behavior.
void StringShortPrint(StringStream* accumulator);
- void PrintUC16(OStream& os, int start = 0, int end = -1); // NOLINT
-#ifdef OBJECT_PRINT
+ void PrintUC16(std::ostream& os, int start = 0, int end = -1); // NOLINT
+#if defined(DEBUG) || defined(OBJECT_PRINT)
char* ToAsciiArray();
#endif
DECLARE_PRINTER(String)
@@ -9257,6 +9492,8 @@
FlatStringReader(Isolate* isolate, Vector<const char> input);
void PostGarbageCollection();
inline uc32 Get(int index);
+ template <typename Char>
+ inline Char Get(int index);
int length() { return length_; }
private:
String** str_;
@@ -9266,26 +9503,13 @@
};
-// A ConsStringOp that returns null.
-// Useful when the operation to apply on a ConsString
-// requires an expensive data structure.
-class ConsStringNullOp {
- public:
- inline ConsStringNullOp() {}
- static inline String* Operate(String*, unsigned*, int32_t*, unsigned*);
- private:
- DISALLOW_COPY_AND_ASSIGN(ConsStringNullOp);
-};
-
-
// This maintains an off-stack representation of the stack frames required
// to traverse a ConsString, allowing an entirely iterative and restartable
// traversal of the entire string
-class ConsStringIteratorOp {
+class ConsStringIterator {
public:
- inline ConsStringIteratorOp() {}
- inline explicit ConsStringIteratorOp(ConsString* cons_string,
- int offset = 0) {
+ inline ConsStringIterator() {}
+ inline explicit ConsStringIterator(ConsString* cons_string, int offset = 0) {
Reset(cons_string, offset);
}
inline void Reset(ConsString* cons_string, int offset = 0) {
@@ -9325,14 +9549,13 @@
int depth_;
int maximum_depth_;
int consumed_;
- DISALLOW_COPY_AND_ASSIGN(ConsStringIteratorOp);
+ DISALLOW_COPY_AND_ASSIGN(ConsStringIterator);
};
class StringCharacterStream {
public:
inline StringCharacterStream(String* string,
- ConsStringIteratorOp* op,
int offset = 0);
inline uint16_t GetNext();
inline bool HasMore();
@@ -9341,13 +9564,13 @@
inline void VisitTwoByteString(const uint16_t* chars, int length);
private:
+ ConsStringIterator iter_;
bool is_one_byte_;
union {
const uint8_t* buffer8_;
const uint16_t* buffer16_;
};
const uint8_t* end_;
- ConsStringIteratorOp* op_;
DISALLOW_COPY_AND_ASSIGN(StringCharacterStream);
};
@@ -9467,8 +9690,10 @@
// of the cell's current type and the value's type. If the change causes
// a change of the type of the cell's contents, code dependent on the cell
// will be deoptimized.
- static void SetValueInferType(Handle<PropertyCell> cell,
- Handle<Object> value);
+ // Usually returns the value that was passed in, but may perform
+ // non-observable modifications on it, such as internalize strings.
+ static Handle<Object> SetValueInferType(Handle<PropertyCell> cell,
+ Handle<Object> value);
// Computes the new type of the cell's contents for the given value, but
// without actually modifying the 'type' field.
@@ -9506,6 +9731,37 @@
};
+class WeakCell : public HeapObject {
+ public:
+ inline Object* value() const;
+
+ // This should not be called by anyone except GC.
+ inline void clear();
+
+ // This should not be called by anyone except allocator.
+ inline void initialize(HeapObject* value);
+
+ inline bool cleared() const;
+
+ DECL_ACCESSORS(next, Object)
+
+ DECLARE_CAST(WeakCell)
+
+ DECLARE_PRINTER(WeakCell)
+ DECLARE_VERIFIER(WeakCell)
+
+ // Layout description.
+ static const int kValueOffset = HeapObject::kHeaderSize;
+ static const int kNextOffset = kValueOffset + kPointerSize;
+ static const int kSize = kNextOffset + kPointerSize;
+
+ typedef FixedBodyDescriptor<kValueOffset, kSize, kSize> BodyDescriptor;
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(WeakCell);
+};
+
+
// The JSProxy describes EcmaScript Harmony proxies
class JSProxy: public JSReceiver {
public:
@@ -9712,7 +9968,7 @@
DECL_ACCESSORS(kind, Object)
#ifdef OBJECT_PRINT
- void OrderedHashTableIteratorPrint(OStream& os); // NOLINT
+ void OrderedHashTableIteratorPrint(std::ostream& os); // NOLINT
#endif
static const int kTableOffset = JSObject::kHeaderSize;
@@ -9856,6 +10112,9 @@
inline bool should_be_freed();
inline void set_should_be_freed(bool value);
+ inline bool is_neuterable();
+ inline void set_is_neuterable(bool value);
+
// [weak_next]: linked list of array buffers.
DECL_ACCESSORS(weak_next, Object)
@@ -9885,6 +10144,7 @@
// Bit position in a flag
static const int kIsExternalBit = 0;
static const int kShouldBeFreed = 1;
+ static const int kIsNeuterableBit = 2;
DISALLOW_IMPLICIT_CONSTRUCTORS(JSArrayBuffer);
};
@@ -10027,10 +10287,13 @@
uint32_t index,
Handle<Object> value);
- static bool IsReadOnlyLengthDescriptor(Handle<Map> jsarray_map);
+ static bool HasReadOnlyLength(Handle<JSArray> array);
static bool WouldChangeReadOnlyLength(Handle<JSArray> array, uint32_t index);
static MaybeHandle<Object> ReadOnlyLengthError(Handle<JSArray> array);
+ // TODO(adamk): Remove this method in favor of HasReadOnlyLength().
+ static bool IsReadOnlyLengthDescriptor(Handle<Map> jsarray_map);
+
// Initialize the array with the given capacity. The function may
// fail due to out-of-memory situations, but only if the requested
// capacity is non-zero.
@@ -10383,6 +10646,11 @@
DECL_ACCESSORS(deleter, Object)
DECL_ACCESSORS(enumerator, Object)
DECL_ACCESSORS(data, Object)
+ DECL_BOOLEAN_ACCESSORS(can_intercept_symbols)
+ DECL_BOOLEAN_ACCESSORS(all_can_read)
+
+ inline int flags() const;
+ inline void set_flags(int flags);
DECLARE_CAST(InterceptorInfo)
@@ -10396,7 +10664,11 @@
static const int kDeleterOffset = kQueryOffset + kPointerSize;
static const int kEnumeratorOffset = kDeleterOffset + kPointerSize;
static const int kDataOffset = kEnumeratorOffset + kPointerSize;
- static const int kSize = kDataOffset + kPointerSize;
+ static const int kFlagsOffset = kDataOffset + kPointerSize;
+ static const int kSize = kFlagsOffset + kPointerSize;
+
+ static const int kCanInterceptSymbolsBit = 0;
+ static const int kAllCanReadBit = 1;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(InterceptorInfo);