Merge V8 at 3.7.12.28
Bug: 5688872
Change-Id: Iddb40cae44d51a2b449f2858951e0472771f5981
diff --git a/src/code-stubs.h b/src/code-stubs.h
index 64c89b9..6bda5da 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -30,6 +30,7 @@
#include "allocation.h"
#include "globals.h"
+#include "codegen.h"
namespace v8 {
namespace internal {
@@ -45,27 +46,22 @@
V(Compare) \
V(CompareIC) \
V(MathPow) \
+ V(RecordWrite) \
+ V(StoreBufferOverflow) \
+ V(RegExpExec) \
V(TranscendentalCache) \
V(Instanceof) \
- /* All stubs above this line only exist in a few versions, which are */ \
- /* generated ahead of time. Therefore compiling a call to one of */ \
- /* them can't cause a new stub to be compiled, so compiling a call to */ \
- /* them is GC safe. The ones below this line exist in many variants */ \
- /* so code compiling a call to one can cause a GC. This means they */ \
- /* can't be called from other stubs, since stub generation code is */ \
- /* not GC safe. */ \
V(ConvertToDouble) \
V(WriteInt32ToHeapNumber) \
V(StackCheck) \
V(FastNewClosure) \
V(FastNewContext) \
+ V(FastNewBlockContext) \
V(FastCloneShallowArray) \
- V(RevertToNumber) \
+ V(FastCloneShallowObject) \
V(ToBoolean) \
V(ToNumber) \
- V(CounterOp) \
V(ArgumentsAccess) \
- V(RegExpExec) \
V(RegExpConstructResult) \
V(NumberToString) \
V(CEntry) \
@@ -73,7 +69,9 @@
V(KeyedLoadElement) \
V(KeyedStoreElement) \
V(DebuggerStatement) \
- V(StringDictionaryNegativeLookup)
+ V(StringDictionaryLookup) \
+ V(ElementsTransitionAndStore) \
+ V(StoreArrayLiteralElement)
// List of code stubs only used on ARM platforms.
#ifdef V8_TARGET_ARCH_ARM
@@ -121,11 +119,6 @@
// Retrieve the code for the stub. Generate the code if needed.
Handle<Code> GetCode();
- // Retrieve the code for the stub if already generated. Do not
- // generate the code if not already generated and instead return a
- // retry after GC Failure object.
- MUST_USE_RESULT MaybeObject* TryGetCode();
-
static Major MajorKeyFromKey(uint32_t key) {
return static_cast<Major>(MajorKeyBits::decode(key));
}
@@ -142,14 +135,35 @@
virtual ~CodeStub() {}
+ bool CompilingCallsToThisStubIsGCSafe() {
+ bool is_pregenerated = IsPregenerated();
+ Code* code = NULL;
+ CHECK(!is_pregenerated || FindCodeInCache(&code));
+ return is_pregenerated;
+ }
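This helper, together with IsPregenerated() below, replaces the old AllowsStubCalls()/TryGetCode() protocol removed later in this patch: a stub is safe to call from other stubs exactly when it is pregenerated, in which case its code is guaranteed to already sit in the cache. A hedged sketch of how a caller's generator might use it before emitting a stub-to-stub call (illustrative only; CallStub is the usual MacroAssembler helper, the wrapper function itself is hypothetical):

    // Hypothetical helper: before one stub's generator emits a call to another
    // stub, insist that compiling that call cannot trigger a GC, i.e. the
    // callee's code must already exist because it was generated ahead of time.
    static void EmitCallToStub(MacroAssembler* masm, CodeStub* callee) {
      ASSERT(callee->CompilingCallsToThisStubIsGCSafe());
      masm->CallStub(callee);
    }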
+
+ // True only for the few stubs whose variants are all generated ahead of
+ // time, so compiling a call to them can never itself trigger stub
+ // compilation (and therefore cannot trigger a GC).
+ virtual bool IsPregenerated() { return false; }
+
+ static void GenerateStubsAheadOfTime();
+ static void GenerateFPStubs();
+
+ // Some stubs put untagged junk on the stack that cannot be scanned by the
+ // GC. This means that we must be statically sure that no GC can occur while
+ // they are running. If that is the case they should override this to return
+ // false, which will cause an assertion if we try to call something that can
+ // GC or if we try to put a stack frame on top of the junk, which would not
+ // result in a traversable stack.
+ virtual bool SometimesSetsUpAFrame() { return true; }
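ToBooleanStub further down in this header is one of the stubs that overrides this hook; for any such frame-less stub the pattern is roughly the sketch below (the class name is hypothetical and the major key is only a placeholder):

    // Hypothetical frame-less stub: it keeps untagged values live on the
    // stack, so it promises never to set up a frame and must not call
    // anything that could trigger a GC while it runs.
    class RawScratchStub : public CodeStub {
     public:
      void Generate(MacroAssembler* masm);
      virtual bool SometimesSetsUpAFrame() { return false; }
     private:
      Major MajorKey() { return ToBoolean; }  // placeholder; a real stub uses its own key
      int MinorKey() { return 0; }
    };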
+
+ // Lookup the code in the (possibly custom) cache.
+ bool FindCodeInCache(Code** code_out);
+
protected:
static const int kMajorBits = 6;
static const int kMinorBits = kBitsPerInt - kSmiTagSize - kMajorBits;
private:
- // Lookup the code in the (possibly custom) cache.
- bool FindCodeInCache(Code** code_out);
-
// Nonvirtual wrapper around the stub-specific Generate function. Call
// this function to set up the macro assembler and generate the code.
void GenerateCode(MacroAssembler* masm);
@@ -162,7 +176,11 @@
void RecordCodeGeneration(Code* code, MacroAssembler* masm);
// Finish the code object after it has been generated.
- virtual void FinishCode(Code* code) { }
+ virtual void FinishCode(Handle<Code> code) { }
+
+ // Activate the newly generated stub. Called after the stub has been
+ // registered in the stub cache.
+ virtual void Activate(Code* code) { }
// Returns information for computing the number key.
virtual Major MajorKey() = 0;
@@ -178,9 +196,7 @@
// Returns a name for logging/debugging purposes.
SmartArrayPointer<const char> GetName();
- virtual void PrintName(StringStream* stream) {
- stream->Add("%s", MajorName(MajorKey(), false));
- }
+ virtual void PrintName(StringStream* stream);
// Returns whether the code generated for this stub needs to be allocated as
// a fixed (non-moveable) code object.
@@ -193,9 +209,6 @@
MajorKeyBits::encode(MajorKey());
}
- // See comment above, where Instanceof is defined.
- bool AllowsStubCalls() { return MajorKey() <= Instanceof; }
-
class MajorKeyBits: public BitField<uint32_t, 0, kMajorBits> {};
class MinorKeyBits: public BitField<uint32_t, kMajorBits, kMinorBits> {};
@@ -286,16 +299,17 @@
class FastNewClosureStub : public CodeStub {
public:
- explicit FastNewClosureStub(StrictModeFlag strict_mode)
- : strict_mode_(strict_mode) { }
+ explicit FastNewClosureStub(LanguageMode language_mode)
+ : language_mode_(language_mode) { }
void Generate(MacroAssembler* masm);
private:
Major MajorKey() { return FastNewClosure; }
- int MinorKey() { return strict_mode_; }
+ int MinorKey() { return language_mode_ == CLASSIC_MODE
+ ? kNonStrictMode : kStrictMode; }
- StrictModeFlag strict_mode_;
+ LanguageMode language_mode_;
};
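Only the classic/non-classic distinction survives into the key, so switching the constructor from StrictModeFlag to LanguageMode does not grow the set of generated closure stubs; strict and extended mode share one variant. A small standalone model of the mapping (enum values assumed here for illustration, not taken from the patch):

    #include <cassert>

    // Model of FastNewClosureStub::MinorKey(): classic mode maps to the old
    // non-strict key, every other language mode maps to the strict key.
    enum LanguageModeModel { CLASSIC = 0, STRICT = 1, EXTENDED = 2 };
    enum StrictModeFlagModel { kNonStrict = 0, kStrict = 1 };

    int MinorKeyModel(LanguageModeModel mode) {
      return mode == CLASSIC ? kNonStrict : kStrict;
    }

    int main() {
      assert(MinorKeyModel(CLASSIC) == kNonStrict);
      assert(MinorKeyModel(STRICT) == kStrict);
      assert(MinorKeyModel(EXTENDED) == kStrict);  // extended mode shares the stub
      return 0;
    }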
@@ -304,7 +318,7 @@
static const int kMaximumSlots = 64;
explicit FastNewContextStub(int slots) : slots_(slots) {
- ASSERT(slots_ > 0 && slots <= kMaximumSlots);
+ ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
}
void Generate(MacroAssembler* masm);
@@ -317,6 +331,24 @@
};
+class FastNewBlockContextStub : public CodeStub {
+ public:
+ static const int kMaximumSlots = 64;
+
+ explicit FastNewBlockContextStub(int slots) : slots_(slots) {
+ ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
+ }
+
+ void Generate(MacroAssembler* masm);
+
+ private:
+ int slots_;
+
+ Major MajorKey() { return FastNewBlockContext; }
+ int MinorKey() { return slots_; }
+};
+
+
class FastCloneShallowArrayStub : public CodeStub {
public:
// Maximum length of copied elements array.
@@ -324,14 +356,16 @@
enum Mode {
CLONE_ELEMENTS,
- COPY_ON_WRITE_ELEMENTS
+ CLONE_DOUBLE_ELEMENTS,
+ COPY_ON_WRITE_ELEMENTS,
+ CLONE_ANY_ELEMENTS
};
FastCloneShallowArrayStub(Mode mode, int length)
: mode_(mode),
length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
- ASSERT(length_ >= 0);
- ASSERT(length_ <= kMaximumClonedLength);
+ ASSERT_GE(length_, 0);
+ ASSERT_LE(length_, kMaximumClonedLength);
}
void Generate(MacroAssembler* masm);
@@ -342,12 +376,32 @@
Major MajorKey() { return FastCloneShallowArray; }
int MinorKey() {
- ASSERT(mode_ == 0 || mode_ == 1);
- return (length_ << 1) | mode_;
+ ASSERT(mode_ == 0 || mode_ == 1 || mode_ == 2 || mode_ == 3);
+ return length_ * 4 + mode_;
}
};
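With four clone modes instead of two, the minor key now reserves the two low bits for the mode and shifts the length up by two. A standalone model of the packing (plain C++ written for this note, not code from the patch):

    #include <cassert>

    // Model of the new FastCloneShallowArrayStub minor key: the low two bits
    // hold the Mode (0..3), the remaining bits hold the cloned length.
    int MinorKeyModel(int length, int mode) {
      assert(mode >= 0 && mode <= 3);   // CLONE_ELEMENTS .. CLONE_ANY_ELEMENTS
      return length * 4 + mode;         // equivalent to (length << 2) | mode
    }

    int main() {
      int key = MinorKeyModel(8, 1);    // length 8, CLONE_DOUBLE_ELEMENTS
      assert(key == 33);
      assert(key >> 2 == 8);            // length recovered
      assert((key & 3) == 1);           // mode recovered
      return 0;
    }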
+class FastCloneShallowObjectStub : public CodeStub {
+ public:
+ // Maximum number of properties in copied object.
+ static const int kMaximumClonedProperties = 6;
+
+ explicit FastCloneShallowObjectStub(int length) : length_(length) {
+ ASSERT_GE(length_, 0);
+ ASSERT_LE(length_, kMaximumClonedProperties);
+ }
+
+ void Generate(MacroAssembler* masm);
+
+ private:
+ int length_;
+
+ Major MajorKey() { return FastCloneShallowObject; }
+ int MinorKey() { return length_; }
+};
+
+
class InstanceofStub: public CodeStub {
public:
enum Flags {
@@ -410,7 +464,9 @@
class OpField: public BitField<int, 0, 3> { };
class StateField: public BitField<int, 3, 5> { };
- virtual void FinishCode(Code* code) { code->set_compare_state(state_); }
+ virtual void FinishCode(Handle<Code> code) {
+ code->set_compare_state(state_);
+ }
virtual CodeStub::Major MajorKey() { return CompareIC; }
virtual int MinorKey();
@@ -513,7 +569,7 @@
int MinorKey();
virtual int GetCodeKind() { return Code::COMPARE_IC; }
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_compare_state(CompareIC::GENERIC);
}
@@ -531,11 +587,18 @@
class CEntryStub : public CodeStub {
public:
- explicit CEntryStub(int result_size)
- : result_size_(result_size), save_doubles_(false) { }
+ explicit CEntryStub(int result_size,
+ SaveFPRegsMode save_doubles = kDontSaveFPRegs)
+ : result_size_(result_size), save_doubles_(save_doubles) { }
void Generate(MacroAssembler* masm);
- void SaveDoubles() { save_doubles_ = true; }
+
+ // The version of this stub that doesn't save doubles is generated ahead of
+ // time, so it's OK to call it from other stubs that can't cope with GC during
+ // their code generation. On machines that always have fp registers (x64) we
+ // can generate both variants ahead of time.
+ virtual bool IsPregenerated();
+ static void GenerateAheadOfTime();
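In practice, callers whose own code generation must stay GC-free should use the default variant, which is always pregenerated; only code that can tolerate stub compilation should request the FP-saving one. Illustrative usage of the new constructor:

    // Pregenerated variant: one result value, doubles not saved. Safe to call
    // from other stubs because no stub compilation (and hence no GC) is needed.
    CEntryStub safe_stub(1);

    // FP-saving variant: may have to be compiled on first use, except on
    // platforms where both variants are generated ahead of time.
    CEntryStub saving_stub(1, kSaveFPRegs);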
private:
void GenerateCore(MacroAssembler* masm,
@@ -550,7 +613,7 @@
// Number of pointers/values returned.
const int result_size_;
- bool save_doubles_;
+ SaveFPRegsMode save_doubles_;
Major MajorKey() { return CEntry; }
int MinorKey();
@@ -571,6 +634,10 @@
private:
Major MajorKey() { return JSEntry; }
int MinorKey() { return 0; }
+
+ virtual void FinishCode(Handle<Code> code);
+
+ int handler_offset_;
};
@@ -647,10 +714,32 @@
void Generate(MacroAssembler* masm);
+ virtual void FinishCode(Handle<Code> code);
+
+ static void Clear(Heap* heap, Address address);
+
+ static Object* GetCachedValue(Address address);
+
static int ExtractArgcFromMinorKey(int minor_key) {
return ArgcBits::decode(minor_key);
}
+ // The object that indicates an uninitialized cache.
+ static Handle<Object> UninitializedSentinel(Isolate* isolate) {
+ return isolate->factory()->the_hole_value();
+ }
+
+ // A raw version of the uninitialized sentinel that's safe to read during
+ // garbage collection (e.g., for patching the cache).
+ static Object* RawUninitializedSentinel(Heap* heap) {
+ return heap->raw_unchecked_the_hole_value();
+ }
+
+ // The object that indicates a megamorphic state.
+ static Handle<Object> MegamorphicSentinel(Isolate* isolate) {
+ return isolate->factory()->undefined_value();
+ }
+
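These sentinels back the new RECORD_CALL_TARGET support: a cache cell starts out holding the hole and, once several different targets have been seen, is presumably set to undefined to mark the call site megamorphic, with a recorded function in between. A hedged sketch of how the cached value could be classified (the classifier function is illustrative, not part of this patch):

    // Illustrative only: classify a call-target cache value using the
    // sentinels above; a monomorphic cache presumably holds the recorded
    // function itself.
    enum CallTargetState { UNINITIALIZED, MONOMORPHIC, MEGAMORPHIC };

    static CallTargetState ClassifyCallTarget(Isolate* isolate,
                                              Handle<Object> cached) {
      if (cached.is_identical_to(
              CallFunctionStub::UninitializedSentinel(isolate))) {
        return UNINITIALIZED;
      }
      if (cached.is_identical_to(
              CallFunctionStub::MegamorphicSentinel(isolate))) {
        return MEGAMORPHIC;
      }
      return MONOMORPHIC;  // a concrete function was recorded
    }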
private:
int argc_;
CallFunctionFlags flags_;
@@ -658,8 +747,8 @@
virtual void PrintName(StringStream* stream);
// Minor key encoding in 32 bits with Bitfield <Type, shift, size>.
- class FlagBits: public BitField<CallFunctionFlags, 0, 1> {};
- class ArgcBits: public BitField<unsigned, 1, 32 - 1> {};
+ class FlagBits: public BitField<CallFunctionFlags, 0, 2> {};
+ class ArgcBits: public BitField<unsigned, 2, 32 - 2> {};
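FlagBits grows from one bit to two so that RECORD_CALL_TARGET can be encoded next to RECEIVER_MIGHT_BE_IMPLICIT, and the argument count moves up accordingly. A standalone model of the resulting key layout (the flag values are assumptions made for this example):

    #include <cassert>
    #include <stdint.h>

    // Model of the CallFunctionStub minor key after this change: bits 0-1
    // carry the CallFunctionFlags, bits 2-31 carry argc.
    const uint32_t kReceiverMightBeImplicit = 1u << 0;  // assumed value
    const uint32_t kRecordCallTarget = 1u << 1;         // assumed value

    uint32_t MinorKeyModel(uint32_t argc, uint32_t flags) {
      assert(flags < 4u);              // must fit in the two flag bits
      return (argc << 2) | flags;
    }

    int main() {
      uint32_t key = MinorKeyModel(3, kRecordCallTarget);
      assert(key >> 2 == 3u);          // argc recovered
      assert((key & 3u) == kRecordCallTarget);
      return 0;
    }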
Major MajorKey() { return CallFunction; }
int MinorKey() {
@@ -670,6 +759,10 @@
bool ReceiverMightBeImplicit() {
return (flags_ & RECEIVER_MIGHT_BE_IMPLICIT) != 0;
}
+
+ bool RecordCallTarget() {
+ return (flags_ & RECORD_CALL_TARGET) != 0;
+ }
};
@@ -698,7 +791,6 @@
public:
StringCharCodeAtGenerator(Register object,
Register index,
- Register scratch,
Register result,
Label* receiver_not_string,
Label* index_not_number,
@@ -706,15 +798,11 @@
StringIndexFlags index_flags)
: object_(object),
index_(index),
- scratch_(scratch),
result_(result),
receiver_not_string_(receiver_not_string),
index_not_number_(index_not_number),
index_out_of_range_(index_out_of_range),
index_flags_(index_flags) {
- ASSERT(!scratch_.is(object_));
- ASSERT(!scratch_.is(index_));
- ASSERT(!scratch_.is(result_));
ASSERT(!result_.is(object_));
ASSERT(!result_.is(index_));
}
@@ -732,7 +820,6 @@
private:
Register object_;
Register index_;
- Register scratch_;
Register result_;
Label* receiver_not_string_;
@@ -795,8 +882,7 @@
public:
StringCharAtGenerator(Register object,
Register index,
- Register scratch1,
- Register scratch2,
+ Register scratch,
Register result,
Label* receiver_not_string,
Label* index_not_number,
@@ -804,13 +890,12 @@
StringIndexFlags index_flags)
: char_code_at_generator_(object,
index,
- scratch1,
- scratch2,
+ scratch,
receiver_not_string,
index_not_number,
index_out_of_range,
index_flags),
- char_from_code_generator_(scratch2, result) {}
+ char_from_code_generator_(scratch, result) {}
// Generates the fast case code. On the fallthrough path |result|
// register contains the result.
@@ -934,11 +1019,13 @@
virtual int GetCodeKind() { return Code::TO_BOOLEAN_IC; }
virtual void PrintName(StringStream* stream);
+ virtual bool SometimesSetsUpAFrame() { return false; }
+
private:
Major MajorKey() { return ToBoolean; }
int MinorKey() { return (tos_.code() << NUMBER_OF_TYPES) | types_.ToByte(); }
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_to_boolean_state(types_.ToByte());
}
@@ -952,6 +1039,56 @@
Types types_;
};
+
+class ElementsTransitionAndStoreStub : public CodeStub {
+ public:
+ ElementsTransitionAndStoreStub(ElementsKind from,
+ ElementsKind to,
+ bool is_jsarray,
+ StrictModeFlag strict_mode)
+ : from_(from),
+ to_(to),
+ is_jsarray_(is_jsarray),
+ strict_mode_(strict_mode) {}
+
+ private:
+ class FromBits: public BitField<ElementsKind, 0, 8> {};
+ class ToBits: public BitField<ElementsKind, 8, 8> {};
+ class IsJSArrayBits: public BitField<bool, 16, 8> {};
+ class StrictModeBits: public BitField<StrictModeFlag, 24, 8> {};
+
+ Major MajorKey() { return ElementsTransitionAndStore; }
+ int MinorKey() {
+ return FromBits::encode(from_) |
+ ToBits::encode(to_) |
+ IsJSArrayBits::encode(is_jsarray_) |
+ StrictModeBits::encode(strict_mode_);
+ }
+
+ void Generate(MacroAssembler* masm);
+
+ ElementsKind from_;
+ ElementsKind to_;
+ bool is_jsarray_;
+ StrictModeFlag strict_mode_;
+
+ DISALLOW_COPY_AND_ASSIGN(ElementsTransitionAndStoreStub);
+};
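The minor key simply gives each parameter of the transition its own byte, which is far more room than ElementsKind or the flags need but keeps the encoding readable. A standalone model (the numeric kind values are illustrative):

    #include <cassert>
    #include <stdint.h>

    // Model of the ElementsTransitionAndStoreStub minor key: one byte each for
    // the source kind, the target kind, the JSArray flag and the strict flag.
    uint32_t MinorKeyModel(uint32_t from, uint32_t to, bool is_jsarray,
                           bool strict) {
      return from | (to << 8) | ((uint32_t)is_jsarray << 16) |
             ((uint32_t)strict << 24);
    }

    int main() {
      // E.g. an (illustrative) kind-1 to kind-2 transition on a strict-mode
      // JSArray store.
      uint32_t key = MinorKeyModel(1, 2, true, true);
      assert((key & 0xff) == 1u);          // from
      assert(((key >> 8) & 0xff) == 2u);   // to
      assert(((key >> 16) & 0xff) == 1u);  // is_jsarray
      assert(((key >> 24) & 0xff) == 1u);  // strict mode
      return 0;
    }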
+
+
+class StoreArrayLiteralElementStub : public CodeStub {
+ public:
+ StoreArrayLiteralElementStub() {}
+
+ private:
+ Major MajorKey() { return StoreArrayLiteralElement; }
+ int MinorKey() { return 0; }
+
+ void Generate(MacroAssembler* masm);
+
+ DISALLOW_COPY_AND_ASSIGN(StoreArrayLiteralElementStub);
+};
+
} } // namespace v8::internal
#endif // V8_CODE_STUBS_H_