Version 3.18.4
Added a preliminary API for ES6 ArrayBuffers.
Replaced qsort with std::sort. (Chromium issue 2639)
Performance and stability improvements on all platforms.
git-svn-id: http://v8.googlecode.com/svn/trunk@14456 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
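The new ArrayBuffer surface visible in the api.cc changes below consists of ArrayBuffer::New (both the allocating and the externally backed form), ByteLength(), Data(), and a Cast checked by ArrayBuffer::CheckCast. A minimal embedder sketch, assuming an initialized VM and a live handle scope; everything other than the ArrayBuffer calls is illustrative:

#include <string.h>
#include "v8.h"

void ArrayBufferSmokeTest() {
  v8::HandleScope scope;

  // Freshly allocated buffer; V8 owns the backing store.
  v8::Local<v8::ArrayBuffer> buf = v8::ArrayBuffer::New(1024);
  size_t length = buf->ByteLength();  // 1024
  memset(buf->Data(), 0, length);     // raw backing store is writable

  // Alternatively, wrap externally owned memory.
  static char external[256];
  v8::Local<v8::ArrayBuffer> wrapped =
      v8::ArrayBuffer::New(external, sizeof(external));
  (void)wrapped;
}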
diff --git a/src/api.cc b/src/api.cc
index df7dd11..2b24ab0 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -52,6 +52,7 @@
#include "profile-generator-inl.h"
#include "property-details.h"
#include "property.h"
+#include "runtime.h"
#include "runtime-profiler.h"
#include "scanner-character-streams.h"
#include "snapshot.h"
@@ -63,11 +64,9 @@
#define LOG_API(isolate, expr) LOG(isolate, ApiEntryCall(expr))
-#define ENTER_V8(isolate) \
- ASSERT((isolate)->IsInitialized()); \
- i::VMState __state__((isolate), i::OTHER)
-#define LEAVE_V8(isolate) \
- i::VMState __state__((isolate), i::EXTERNAL)
+#define ENTER_V8(isolate) \
+ ASSERT((isolate)->IsInitialized()); \
+ i::VMState<i::OTHER> __state__((isolate))
namespace v8 {
@@ -131,7 +130,7 @@
const char* message) {
i::Isolate* isolate = i::Isolate::Current();
if (isolate->IsInitialized()) {
- i::VMState __state__(isolate, i::OTHER);
+ i::VMState<i::OTHER> state(isolate);
API_Fatal(location, message);
} else {
API_Fatal(location, message);
@@ -216,14 +215,7 @@
i::V8::SetFatalError();
FatalErrorCallback callback = GetFatalErrorHandler();
const char* message = "Allocation failed - process out of memory";
- {
- if (isolate->IsInitialized()) {
- LEAVE_V8(isolate);
- callback(location, message);
- } else {
- callback(location, message);
- }
- }
+ callback(location, message);
// If the callback returns, we stop execution.
UNREACHABLE();
}
@@ -2754,6 +2746,15 @@
}
+void v8::ArrayBuffer::CheckCast(Value* that) {
+ if (IsDeadCheck(i::Isolate::Current(), "v8::ArrayBuffer::Cast()")) return;
+ i::Handle<i::Object> obj = Utils::OpenHandle(that);
+ ApiCheck(obj->IsJSArrayBuffer(),
+ "v8::ArrayBuffer::Cast()",
+ "Could not convert to ArrayBuffer");
+}
+
+
void v8::Date::CheckCast(v8::Value* that) {
i::Isolate* isolate = i::Isolate::Current();
if (IsDeadCheck(isolate, "v8::Date::Cast()")) return;
@@ -4054,14 +4055,6 @@
return str->length();
}
-bool String::MayContainNonAscii() const {
- i::Handle<i::String> str = Utils::OpenHandle(this);
- if (IsDeadCheck(str->GetIsolate(), "v8::String::MayContainNonAscii()")) {
- return false;
- }
- return !str->HasOnlyAsciiChars();
-}
-
bool String::IsOneByte() const {
i::Handle<i::String> str = Utils::OpenHandle(this);
@@ -4515,25 +4508,6 @@
FlattenString(str); // Flatten the string for efficiency.
}
- if (str->HasOnlyAsciiChars()) {
- // WriteToFlat is faster than using the StringCharacterStream.
- if (length == -1) length = str->length() + 1;
- int len = i::Min(length, str->length() - start);
- i::String::WriteToFlat(*str,
- reinterpret_cast<uint8_t*>(buffer),
- start,
- start + len);
- if (!(options & PRESERVE_ASCII_NULL)) {
- for (int i = 0; i < len; i++) {
- if (buffer[i] == '\0') buffer[i] = ' ';
- }
- }
- if (!(options & NO_NULL_TERMINATION) && length > len) {
- buffer[len] = '\0';
- }
- return len;
- }
-
int end = length;
if ((length == -1) || (length > str->length() - start)) {
end = str->length() - start;
@@ -5792,6 +5766,46 @@
}
+size_t v8::ArrayBuffer::ByteLength() const {
+ i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+ if (IsDeadCheck(isolate, "v8::ArrayBuffer::ByteLength()")) return 0;
+ i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this);
+ return static_cast<size_t>(obj->byte_length()->Number());
+}
+
+
+void* v8::ArrayBuffer::Data() const {
+ i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+ if (IsDeadCheck(isolate, "v8::ArrayBuffer::Data()")) return 0;
+ i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this);
+ return obj->backing_store();
+}
+
+
+Local<ArrayBuffer> v8::ArrayBuffer::New(size_t byte_length) {
+ i::Isolate* isolate = i::Isolate::Current();
+ EnsureInitializedForIsolate(isolate, "v8::ArrayBuffer::New(size_t)");
+ LOG_API(isolate, "v8::ArrayBuffer::New(size_t)");
+ ENTER_V8(isolate);
+ i::Handle<i::JSArrayBuffer> obj =
+ isolate->factory()->NewJSArrayBuffer();
+ i::Runtime::SetupArrayBufferAllocatingData(isolate, obj, byte_length);
+ return Utils::ToLocal(obj);
+}
+
+
+Local<ArrayBuffer> v8::ArrayBuffer::New(void* data, size_t byte_length) {
+ i::Isolate* isolate = i::Isolate::Current();
+ EnsureInitializedForIsolate(isolate, "v8::ArrayBuffer::New(void*, size_t)");
+ LOG_API(isolate, "v8::ArrayBuffer::New(void*, size_t)");
+ ENTER_V8(isolate);
+ i::Handle<i::JSArrayBuffer> obj =
+ isolate->factory()->NewJSArrayBuffer();
+ i::Runtime::SetupArrayBuffer(isolate, obj, data, byte_length);
+ return Utils::ToLocal(obj);
+}
+
+
Local<Symbol> v8::Symbol::New(Isolate* isolate) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
EnsureInitializedForIsolate(i_isolate, "v8::Symbol::New()");
@@ -6028,6 +6042,31 @@
}
+void Isolate::SetObjectGroupId(const Persistent<Value>& object,
+ UniqueId id) {
+ i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
+ internal_isolate->global_handles()->SetObjectGroupId(
+ reinterpret_cast<i::Object**>(*object), id);
+}
+
+
+void Isolate::SetReferenceFromGroup(UniqueId id,
+ const Persistent<Value>& object) {
+ i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
+ internal_isolate->global_handles()
+ ->SetReferenceFromGroup(id, reinterpret_cast<i::Object**>(*object));
+}
+
+
+void Isolate::SetReference(const Persistent<Object>& parent,
+ const Persistent<Value>& child) {
+ i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
+ internal_isolate->global_handles()->SetReference(
+ i::Handle<i::HeapObject>::cast(Utils::OpenHandle(*parent)).location(),
+ reinterpret_cast<i::Object**>(*child));
+}
+
+
void V8::SetGlobalGCPrologueCallback(GCCallback callback) {
i::Isolate* isolate = i::Isolate::Current();
if (IsDeadCheck(isolate, "v8::V8::SetGlobalGCPrologueCallback()")) return;
@@ -7227,6 +7266,12 @@
}
+void HeapProfiler::SetRetainedObjectInfo(UniqueId id,
+ RetainedObjectInfo* info) {
+ reinterpret_cast<i::HeapProfiler*>(this)->SetRetainedObjectInfo(id, info);
+}
+
+
v8::Testing::StressType internal::Testing::stress_type_ =
v8::Testing::kStressTypeOpt;
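The api.cc diff above also introduces isolate-level object grouping (SetObjectGroupId, SetReferenceFromGroup, SetReference) plus HeapProfiler::SetRetainedObjectInfo. A hedged usage sketch, typically run from a GC prologue callback so the grouping is fresh for each collection; the handles, the id value, and the UniqueId integer constructor are assumptions here, only the Isolate methods come from this patch:

void RegisterWrapperGroup(v8::Isolate* isolate,
                          const v8::Persistent<v8::Value>& wrapper_a,
                          const v8::Persistent<v8::Value>& wrapper_b,
                          const v8::Persistent<v8::Value>& extra) {
  v8::UniqueId group(1);  // assumed integer-constructible; stable per GC cycle
  // wrapper_a and wrapper_b form one group: they live or die together.
  isolate->SetObjectGroupId(wrapper_a, group);
  isolate->SetObjectGroupId(wrapper_b, group);
  // The group additionally keeps extra's target alive.
  isolate->SetReferenceFromGroup(group, extra);
}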
diff --git a/src/api.h b/src/api.h
index 0cd16f1..f62541d 100644
--- a/src/api.h
+++ b/src/api.h
@@ -170,6 +170,7 @@
V(RegExp, JSRegExp) \
V(Object, JSObject) \
V(Array, JSArray) \
+ V(ArrayBuffer, JSArrayBuffer) \
V(String, String) \
V(Symbol, Symbol) \
V(Script, Object) \
@@ -205,6 +206,8 @@
v8::internal::Handle<v8::internal::JSObject> obj);
static inline Local<Array> ToLocal(
v8::internal::Handle<v8::internal::JSArray> obj);
+ static inline Local<ArrayBuffer> ToLocal(
+ v8::internal::Handle<v8::internal::JSArrayBuffer> obj);
static inline Local<Message> MessageToLocal(
v8::internal::Handle<v8::internal::Object> obj);
static inline Local<StackTrace> StackTraceToLocal(
@@ -275,6 +278,7 @@
MAKE_TO_LOCAL(ToLocal, JSRegExp, RegExp)
MAKE_TO_LOCAL(ToLocal, JSObject, Object)
MAKE_TO_LOCAL(ToLocal, JSArray, Array)
+MAKE_TO_LOCAL(ToLocal, JSArrayBuffer, ArrayBuffer)
MAKE_TO_LOCAL(ToLocal, FunctionTemplateInfo, FunctionTemplate)
MAKE_TO_LOCAL(ToLocal, ObjectTemplateInfo, ObjectTemplate)
MAKE_TO_LOCAL(ToLocal, SignatureInfo, Signature)
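For orientation: MAKE_TO_LOCAL stamps out the Utils::ToLocal overloads declared above, so the new JSArrayBuffer entry yields roughly the following definition (a simplified sketch; the real macro in api.h also asserts the handle is valid):

Local<ArrayBuffer> Utils::ToLocal(
    v8::internal::Handle<v8::internal::JSArrayBuffer> obj) {
  return Local<ArrayBuffer>(reinterpret_cast<ArrayBuffer*>(obj.location()));
}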
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index ebb9e12..3cc2797 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -306,8 +306,7 @@
// entering the generic code. In both cases argc in r0 needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// construct call and normal call.
-static void ArrayNativeCode(MacroAssembler* masm,
- Label* call_generic_code) {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code) {
Counters* counters = masm->isolate()->counters();
Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
has_non_smi_element, finish, cant_transition_map, not_double;
@@ -532,7 +531,7 @@
}
-void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
+void Builtins::Generate_CommonArrayConstructCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : number of arguments
// -- r1 : constructor function
@@ -550,51 +549,17 @@
__ Assert(ne, "Unexpected initial map for Array function");
__ CompareObjectType(r3, r3, r4, MAP_TYPE);
__ Assert(eq, "Unexpected initial map for Array function");
-
- if (FLAG_optimize_constructed_arrays) {
- // We should either have undefined in r2 or a valid jsglobalpropertycell
- Label okay_here;
- Handle<Object> undefined_sentinel(
- masm->isolate()->heap()->undefined_value(), masm->isolate());
- Handle<Map> global_property_cell_map(
- masm->isolate()->heap()->global_property_cell_map());
- __ cmp(r2, Operand(undefined_sentinel));
- __ b(eq, &okay_here);
- __ ldr(r3, FieldMemOperand(r2, 0));
- __ cmp(r3, Operand(global_property_cell_map));
- __ Assert(eq, "Expected property cell in register ebx");
- __ bind(&okay_here);
- }
}
+ Label generic_constructor;
+ // Run the native code for the Array function called as a constructor.
+ ArrayNativeCode(masm, &generic_constructor);
- if (FLAG_optimize_constructed_arrays) {
- Label not_zero_case, not_one_case;
- __ tst(r0, r0);
- __ b(ne, &not_zero_case);
- ArrayNoArgumentConstructorStub no_argument_stub;
- __ TailCallStub(&no_argument_stub);
-
- __ bind(&not_zero_case);
- __ cmp(r0, Operand(1));
- __ b(gt, &not_one_case);
- ArraySingleArgumentConstructorStub single_argument_stub;
- __ TailCallStub(&single_argument_stub);
-
- __ bind(&not_one_case);
- ArrayNArgumentsConstructorStub n_argument_stub;
- __ TailCallStub(&n_argument_stub);
- } else {
- Label generic_constructor;
- // Run the native code for the Array function called as a constructor.
- ArrayNativeCode(masm, &generic_constructor);
-
- // Jump to the generic construct code in case the specialized code cannot
- // handle the construction.
- __ bind(&generic_constructor);
- Handle<Code> generic_construct_stub =
- masm->isolate()->builtins()->JSConstructStubGeneric();
- __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
- }
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 1f69dbe..cc6caca 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -96,16 +96,33 @@
}
-static void InitializeArrayConstructorDescriptor(Isolate* isolate,
+void CompareNilICStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { r0 };
+ descriptor->register_param_count_ = 1;
+ descriptor->register_params_ = registers;
+ descriptor->deoptimization_handler_ =
+ FUNCTION_ADDR(CompareNilIC_Miss);
+ descriptor->miss_handler_ =
+ ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate);
+}
+
+
+static void InitializeArrayConstructorDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor,
+ int constant_stack_parameter_count) {
// register state
- // r1 -- constructor function
+ // r0 -- number of arguments
// r2 -- type info cell with elements kind
- // r0 -- number of arguments to the constructor function
- static Register registers[] = { r1, r2 };
- descriptor->register_param_count_ = 2;
- // stack param count needs (constructor pointer, and single argument)
- descriptor->stack_parameter_count_ = &r0;
+ static Register registers[] = { r2 };
+ descriptor->register_param_count_ = 1;
+ if (constant_stack_parameter_count != 0) {
+ // stack param count needs (constructor pointer, and single argument)
+ descriptor->stack_parameter_count_ = &r0;
+ }
+ descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
descriptor->register_params_ = registers;
descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
descriptor->deoptimization_handler_ =
@@ -116,21 +133,21 @@
void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}
void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}
void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}
@@ -3010,6 +3027,9 @@
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
+ if (FLAG_optimize_constructed_arrays) {
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+ }
}
@@ -4714,7 +4734,7 @@
TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
LAST_FAST_ELEMENTS_KIND);
__ cmp(r3, Operand(terminal_kind_sentinel));
- __ b(ne, &miss);
+ __ b(gt, &miss);
// Make sure the function is the Array() function
__ LoadArrayFunction(r3);
__ cmp(r1, r3);
@@ -5930,16 +5950,16 @@
__ bind(&non_ascii);
// At least one of the strings is two-byte. Check whether it happens
- // to contain only ASCII characters.
+ // to contain only one-byte characters.
// r4: first instance type.
// r5: second instance type.
- __ tst(r4, Operand(kAsciiDataHintMask));
- __ tst(r5, Operand(kAsciiDataHintMask), ne);
+ __ tst(r4, Operand(kOneByteDataHintMask));
+ __ tst(r5, Operand(kOneByteDataHintMask), ne);
__ b(ne, &ascii_data);
__ eor(r4, r4, Operand(r5));
- STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
- __ and_(r4, r4, Operand(kOneByteStringTag | kAsciiDataHintTag));
- __ cmp(r4, Operand(kOneByteStringTag | kAsciiDataHintTag));
+ STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
+ __ and_(r4, r4, Operand(kOneByteStringTag | kOneByteDataHintTag));
+ __ cmp(r4, Operand(kOneByteStringTag | kOneByteDataHintTag));
__ b(eq, &ascii_data);
// Allocate a two byte cons string.
@@ -7153,6 +7173,196 @@
__ Ret();
}
+
+template<class T>
+static void CreateArrayDispatch(MacroAssembler* masm) {
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ cmp(r3, Operand(kind));
+ __ b(ne, &next);
+ T stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
+ // r2 - type info cell
+ // r3 - kind
+ // r0 - number of arguments
+ // r1 - constructor?
+ // sp[0] - last argument
+ ASSERT(FAST_SMI_ELEMENTS == 0);
+ ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ ASSERT(FAST_ELEMENTS == 2);
+ ASSERT(FAST_HOLEY_ELEMENTS == 3);
+ ASSERT(FAST_DOUBLE_ELEMENTS == 4);
+ ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
+
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ // is the low bit set? If so, we are holey and that is good.
+ __ tst(r3, Operand(1));
+ Label normal_sequence;
+ __ b(ne, &normal_sequence);
+
+ // look at the first argument
+ __ ldr(r5, MemOperand(sp, 0));
+ __ cmp(r5, Operand::Zero());
+ __ b(eq, &normal_sequence);
+
+ // We are going to create a holey array, but our kind is non-holey.
+ // Fix kind and retry
+ __ add(r3, r3, Operand(1));
+ __ cmp(r2, Operand(undefined_sentinel));
+ __ b(eq, &normal_sequence);
+
+ // Save the resulting elements kind in type info
+ __ SmiTag(r3);
+ __ str(r3, FieldMemOperand(r2, kPointerSize));
+ __ SmiUntag(r3);
+
+ __ bind(&normal_sequence);
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ cmp(r3, Operand(kind));
+ __ b(ne, &next);
+ ArraySingleArgumentConstructorStub stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+template<class T>
+static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
+ int to_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= to_index; ++i) {
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ T stub(kind);
+ stub.GetCode(isolate)->set_is_pregenerated(true);
+ }
+}
+
+
+void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
+ isolate);
+}
+
+
+void ArrayConstructorStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- r0 : argc (only if argument_count_ == ANY)
+ // -- r1 : constructor
+ // -- r2 : type info cell
+ // -- sp[0] : return address
+ // -- sp[4] : last argument
+ // -----------------------------------
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ if (FLAG_debug_code) {
+ // The array construct code is only set for the global and natives
+ // builtin Array functions which always have maps.
+
+ // Initial map for the builtin Array function should be a map.
+ __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
+ // Will both indicate a NULL and a Smi.
+ __ tst(r3, Operand(kSmiTagMask));
+ __ Assert(ne, "Unexpected initial map for Array function");
+ __ CompareObjectType(r3, r3, r4, MAP_TYPE);
+ __ Assert(eq, "Unexpected initial map for Array function");
+
+ // We should either have undefined in ebx or a valid jsglobalpropertycell
+ Label okay_here;
+ Handle<Map> global_property_cell_map(
+ masm->isolate()->heap()->global_property_cell_map());
+ __ cmp(r2, Operand(undefined_sentinel));
+ __ b(eq, &okay_here);
+ __ ldr(r3, FieldMemOperand(r2, 0));
+ __ cmp(r3, Operand(global_property_cell_map));
+ __ Assert(eq, "Expected property cell in register ebx");
+ __ bind(&okay_here);
+ }
+
+ if (FLAG_optimize_constructed_arrays) {
+ Label no_info, switch_ready;
+ // Get the elements kind and case on that.
+ __ cmp(r2, Operand(undefined_sentinel));
+ __ b(eq, &no_info);
+ __ ldr(r3, FieldMemOperand(r2, kPointerSize));
+
+ // There is no info if the call site went megamorphic either
+ // TODO(mvstanton): Really? I thought if it was the array function that
+ // the cell wouldn't get stamped as megamorphic.
+ __ cmp(r3,
+ Operand(TypeFeedbackCells::MegamorphicSentinel(masm->isolate())));
+ __ b(eq, &no_info);
+ __ SmiUntag(r3);
+ __ jmp(&switch_ready);
+ __ bind(&no_info);
+ __ mov(r3, Operand(GetInitialFastElementsKind()));
+ __ bind(&switch_ready);
+
+ if (argument_count_ == ANY) {
+ Label not_zero_case, not_one_case;
+ __ tst(r0, r0);
+ __ b(ne, &not_zero_case);
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+
+ __ bind(&not_zero_case);
+ __ cmp(r0, Operand(1));
+ __ b(gt, &not_one_case);
+ CreateArrayDispatchOneArgument(masm);
+
+ __ bind(&not_one_case);
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else if (argument_count_ == NONE) {
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+ } else if (argument_count_ == ONE) {
+ CreateArrayDispatchOneArgument(masm);
+ } else if (argument_count_ == MORE_THAN_ONE) {
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else {
+ UNREACHABLE();
+ }
+ } else {
+ Label generic_constructor;
+ // Run the native code for the Array function called as a constructor.
+ ArrayNativeCode(masm, &generic_constructor);
+
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
+ }
+}
+
+
#undef __
} } // namespace v8::internal
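CreateArrayDispatch and CreateArrayDispatchOneArgument above emit a linear compare chain over the fast elements kinds and tail-call the matching pregenerated stub; the one-argument variant first upgrades a packed kind to its holey sibling when the requested length is non-zero. A self-contained C++ rendering of that control flow, illustrative only; the enum values mirror the ASSERTs in the hunk:

#include <cstdio>

enum ElementsKind {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2,
  FAST_HOLEY_ELEMENTS = 3,
  FAST_DOUBLE_ELEMENTS = 4,
  FAST_HOLEY_DOUBLE_ELEMENTS = 5  // terminal fast kind in this sequence
};

void DispatchOneArgument(ElementsKind kind, int first_argument) {
  // Packed kinds are even, holey kinds odd; a non-zero length upgrades a
  // packed kind to its holey sibling, as the fix-up above does to r3.
  if ((kind & 1) == 0 && first_argument != 0) {
    kind = static_cast<ElementsKind>(kind + 1);
  }
  // The emitted code compares r3 against each kind in sequence order and
  // tail-calls the first matching ArraySingleArgumentConstructorStub.
  for (int k = FAST_SMI_ELEMENTS; k <= FAST_HOLEY_DOUBLE_ELEMENTS; ++k) {
    if (kind == k) {
      std::printf("TailCallStub(ArraySingleArgumentConstructorStub(%d))\n", k);
      return;
    }
  }
  std::printf("Abort: unexpected ElementsKind in array constructor\n");
}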
diff --git a/src/arm/code-stubs-arm.h b/src/arm/code-stubs-arm.h
index 39a9fc8..0b1a8b8 100644
--- a/src/arm/code-stubs-arm.h
+++ b/src/arm/code-stubs-arm.h
@@ -34,6 +34,9 @@
namespace internal {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
+
+
// Compute a transcendental math function natively, or call the
// TranscendentalCache runtime function.
class TranscendentalCacheStub: public PlatformCodeStub {
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 7ecc2b3..0ef4be0 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -1976,6 +1976,104 @@
}
+void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
+ Expression *value,
+ JSGeneratorObject::ResumeMode resume_mode) {
+ // The value stays in r0, and is ultimately read by the resumed generator, as
+ // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. r1
+ // will hold the generator object until the activation has been resumed.
+ VisitForStackValue(generator);
+ VisitForAccumulatorValue(value);
+ __ pop(r1);
+
+ // Check generator state.
+ Label wrong_state, done;
+ __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
+ __ cmp(r3, Operand(Smi::FromInt(0)));
+ __ b(le, &wrong_state);
+
+ // Load suspended function and context.
+ __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
+ __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
+
+ // Load receiver and store as the first argument.
+ __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
+ __ push(r2);
+
+ // Push holes for the rest of the arguments to the generator function.
+ __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r3,
+ FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
+ Label push_argument_holes, push_frame;
+ __ bind(&push_argument_holes);
+ __ sub(r3, r3, Operand(1), SetCC);
+ __ b(mi, &push_frame);
+ __ push(r2);
+ __ jmp(&push_argument_holes);
+
+ // Enter a new JavaScript frame, and initialize its slots as they were when
+ // the generator was suspended.
+ Label resume_frame;
+ __ bind(&push_frame);
+ __ bl(&resume_frame);
+ __ jmp(&done);
+ __ bind(&resume_frame);
+ __ push(lr); // Return address.
+ __ push(fp); // Caller's frame pointer.
+ __ mov(fp, sp);
+ __ push(cp); // Callee's context.
+ __ push(r4); // Callee's JS Function.
+
+ // Load the operand stack size.
+ __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
+ __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
+ __ SmiUntag(r3);
+
+ // If we are sending a value and there is no operand stack, we can jump back
+ // in directly.
+ if (resume_mode == JSGeneratorObject::SEND) {
+ Label slow_resume;
+ __ cmp(r3, Operand(0));
+ __ b(ne, &slow_resume);
+ __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
+ __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
+ __ SmiUntag(r2);
+ __ add(r3, r3, r2);
+ __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
+ __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
+ __ Jump(r3);
+ __ bind(&slow_resume);
+ }
+
+ // Otherwise, we push holes for the operand stack and call the runtime to fix
+ // up the stack and the handlers.
+ Label push_operand_holes, call_resume;
+ __ bind(&push_operand_holes);
+ __ sub(r3, r3, Operand(1), SetCC);
+ __ b(mi, &call_resume);
+ __ push(r2);
+ __ b(&push_operand_holes);
+ __ bind(&call_resume);
+ __ push(r1);
+ __ push(result_register());
+ __ Push(Smi::FromInt(resume_mode));
+ __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
+ // Not reached: the runtime call returns elsewhere.
+ __ stop("not-reached");
+
+ // Throw error if we attempt to operate on a running generator.
+ __ bind(&wrong_state);
+ __ push(r1);
+ __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
+
+ __ bind(&done);
+ context()->Plug(result_register());
+}
+
+
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Literal* key = prop->key()->AsLiteral();
@@ -4437,28 +4535,22 @@
VisitForAccumulatorValue(sub_expr);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
- Heap::RootListIndex nil_value = nil == kNullValue ?
- Heap::kNullValueRootIndex :
- Heap::kUndefinedValueRootIndex;
- __ LoadRoot(r1, nil_value);
- __ cmp(r0, r1);
- if (expr->op() == Token::EQ_STRICT) {
+ EqualityKind kind = expr->op() == Token::EQ_STRICT
+ ? kStrictEquality : kNonStrictEquality;
+ if (kind == kStrictEquality) {
+ Heap::RootListIndex nil_value = nil == kNullValue ?
+ Heap::kNullValueRootIndex :
+ Heap::kUndefinedValueRootIndex;
+ __ LoadRoot(r1, nil_value);
+ __ cmp(r0, r1);
Split(eq, if_true, if_false, fall_through);
} else {
- Heap::RootListIndex other_nil_value = nil == kNullValue ?
- Heap::kUndefinedValueRootIndex :
- Heap::kNullValueRootIndex;
- __ b(eq, if_true);
- __ LoadRoot(r1, other_nil_value);
- __ cmp(r0, r1);
- __ b(eq, if_true);
- __ JumpIfSmi(r0, if_false);
- // It can be an undetectable object.
- __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
- __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
- __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
- __ cmp(r1, Operand(1 << Map::kIsUndetectable));
- Split(eq, if_true, if_false, fall_through);
+ Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(),
+ kNonStrictEquality,
+ nil);
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
+ __ cmp(r0, Operand(0));
+ Split(ne, if_true, if_false, fall_through);
}
context()->Plug(if_true, if_false);
}
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index b240de7..66c108d 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -2431,7 +2431,8 @@
ASSERT(info()->IsStub());
CodeStubInterfaceDescriptor* descriptor =
info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
- Register reg = descriptor->register_params_[instr->index()];
+ int index = static_cast<int>(instr->index());
+ Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index);
return DefineFixed(result, reg);
}
}
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 2996c97..29e01b9 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -2942,19 +2942,20 @@
if (NeedsEagerFrame()) {
__ mov(sp, fp);
__ ldm(ia_w, sp, fp.bit() | lr.bit());
-
- if (instr->has_constant_parameter_count()) {
- int parameter_count = ToInteger32(instr->constant_parameter_count());
- int32_t sp_delta = (parameter_count + 1) * kPointerSize;
- if (sp_delta != 0) {
- __ add(sp, sp, Operand(sp_delta));
- }
- } else {
- Register reg = ToRegister(instr->parameter_count());
- __ add(reg, reg, Operand(1));
- __ add(sp, sp, Operand(reg, LSL, kPointerSizeLog2));
- }
}
+ if (instr->has_constant_parameter_count()) {
+ int parameter_count = ToInteger32(instr->constant_parameter_count());
+ int32_t sp_delta = (parameter_count + 1) * kPointerSize;
+ if (sp_delta != 0) {
+ __ add(sp, sp, Operand(sp_delta));
+ }
+ } else {
+ Register reg = ToRegister(instr->parameter_count());
+ // The argument count parameter is a smi.
+ __ SmiUntag(reg);
+ __ add(sp, sp, Operand(reg, LSL, kPointerSizeLog2));
+ }
+
__ Jump(lr);
}
@@ -4233,10 +4234,18 @@
__ mov(r0, Operand(instr->arity()));
__ mov(r2, Operand(instr->hydrogen()->property_cell()));
- Handle<Code> array_construct_code =
- isolate()->builtins()->ArrayConstructCode();
-
- CallCode(array_construct_code, RelocInfo::CONSTRUCT_CALL, instr);
+ Object* cell_value = instr->hydrogen()->property_cell()->value();
+ ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+ if (instr->arity() == 0) {
+ ArrayNoArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else if (instr->arity() == 1) {
+ ArraySingleArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else {
+ ArrayNArgumentsConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ }
}
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index bbbe520..b7cd3db 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -2264,8 +2264,9 @@
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
- PrepareCallCFunction(0, r0);
- CallCFunction(ExternalReference::log_enter_external_function(isolate()), 0);
+ PrepareCallCFunction(1, r0);
+ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
+ CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
PopSafepointRegisters();
}
@@ -2278,8 +2279,9 @@
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
- PrepareCallCFunction(0, r0);
- CallCFunction(ExternalReference::log_leave_external_function(isolate()), 0);
+ PrepareCallCFunction(1, r0);
+ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
+ CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
PopSafepointRegisters();
}
diff --git a/src/assembler.h b/src/assembler.h
index 9323767..32424cf 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -50,7 +50,7 @@
namespace internal {
-struct StatsCounter;
+class StatsCounter;
// -----------------------------------------------------------------------------
// Platform independent assembler base class.
diff --git a/src/ast.cc b/src/ast.cc
index d6af89b..d241355 100644
--- a/src/ast.cc
+++ b/src/ast.cc
@@ -70,6 +70,11 @@
}
+bool Expression::IsUndefinedLiteral() {
+ return AsLiteral() != NULL && AsLiteral()->handle()->IsUndefined();
+}
+
+
VariableProxy::VariableProxy(Isolate* isolate, Variable* var)
: Expression(isolate),
name_(var->name()),
@@ -352,7 +357,8 @@
}
-// Check for the pattern: void <literal> equals <expression>
+// Check for the pattern: void <literal> equals <expression> or
+// undefined equals <expression>
static bool MatchLiteralCompareUndefined(Expression* left,
Token::Value op,
Expression* right,
@@ -361,6 +367,10 @@
*expr = right;
return true;
}
+ if (left->IsUndefinedLiteral() && Token::IsEqualityOp(op)) {
+ *expr = right;
+ return true;
+ }
return false;
}
diff --git a/src/ast.h b/src/ast.h
index 594b780..10ae7de 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -339,6 +339,9 @@
// True iff the expression is the null literal.
bool IsNullLiteral();
+ // True iff the expression is the undefined literal.
+ bool IsUndefinedLiteral();
+
// Type feedback information for assignments and properties.
virtual bool IsMonomorphic() {
UNREACHABLE();
@@ -939,15 +942,18 @@
public:
DECLARE_NODE_TYPE(WithStatement)
+ Scope* scope() { return scope_; }
Expression* expression() const { return expression_; }
Statement* statement() const { return statement_; }
protected:
- WithStatement(Expression* expression, Statement* statement)
- : expression_(expression),
+ WithStatement(Scope* scope, Expression* expression, Statement* statement)
+ : scope_(scope),
+ expression_(expression),
statement_(statement) { }
private:
+ Scope* scope_;
Expression* expression_;
Statement* statement_;
};
@@ -2784,9 +2790,11 @@
VISIT_AND_RETURN(ReturnStatement, stmt)
}
- WithStatement* NewWithStatement(Expression* expression,
+ WithStatement* NewWithStatement(Scope* scope,
+ Expression* expression,
Statement* statement) {
- WithStatement* stmt = new(zone_) WithStatement(expression, statement);
+ WithStatement* stmt = new(zone_) WithStatement(
+ scope, expression, statement);
VISIT_AND_RETURN(WithStatement, stmt)
}
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 12f0cda..85bf96e 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -43,6 +43,7 @@
#include "extensions/externalize-string-extension.h"
#include "extensions/gc-extension.h"
#include "extensions/statistics-extension.h"
+#include "code-stubs.h"
namespace v8 {
namespace internal {
@@ -862,8 +863,6 @@
InstallFunction(global, "Array", JS_ARRAY_TYPE, JSArray::kSize,
isolate->initial_object_prototype(),
Builtins::kArrayCode, true);
- array_function->shared()->set_construct_stub(
- isolate->builtins()->builtin(Builtins::kArrayConstructCode));
array_function->shared()->DontAdaptArguments();
// This seems a bit hackish, but we need to make sure Array.length
@@ -890,6 +889,17 @@
// as the constructor. 'Array' property on a global object can be
// overwritten by JS code.
native_context()->set_array_function(*array_function);
+
+ if (FLAG_optimize_constructed_arrays) {
+ // Cache the array maps, needed by ArrayConstructorStub
+ CacheInitialJSArrayMaps(native_context(), initial_map);
+ ArrayConstructorStub array_constructor_stub(isolate);
+ Handle<Code> code = array_constructor_stub.GetCode(isolate);
+ array_function->shared()->set_construct_stub(*code);
+ } else {
+ array_function->shared()->set_construct_stub(
+ isolate->builtins()->builtin(Builtins::kCommonArrayConstructCode));
+ }
}
{ // --- N u m b e r ---
@@ -1303,10 +1313,12 @@
if (FLAG_harmony_typed_arrays) {
{ // -- A r r a y B u f f e r
- InstallFunction(global, "__ArrayBuffer", JS_ARRAY_BUFFER_TYPE,
- JSArrayBuffer::kSize,
- isolate()->initial_object_prototype(),
- Builtins::kIllegal, true);
+ Handle<JSFunction> array_buffer_fun =
+ InstallFunction(global, "__ArrayBuffer", JS_ARRAY_BUFFER_TYPE,
+ JSArrayBuffer::kSize,
+ isolate()->initial_object_prototype(),
+ Builtins::kIllegal, true);
+ native_context()->set_array_buffer_fun(*array_buffer_fun);
}
{
// -- T y p e d A r r a y s
@@ -1533,13 +1545,8 @@
factory()->NewJSObject(isolate()->object_function(), TENURED);
SetPrototype(array_function, prototype);
- // TODO(mvstanton): For performance reasons, this code would have to
- // be changed to successfully run with FLAG_optimize_constructed_arrays.
- // The next checkin to enable FLAG_optimize_constructed_arrays by
- // default will address this.
- CHECK(!FLAG_optimize_constructed_arrays);
array_function->shared()->set_construct_stub(
- isolate()->builtins()->builtin(Builtins::kArrayConstructCode));
+ isolate()->builtins()->builtin(Builtins::kCommonArrayConstructCode));
array_function->shared()->DontAdaptArguments();
diff --git a/src/builtins.cc b/src/builtins.cc
index 30edf57..5718180 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -192,9 +192,8 @@
RUNTIME_FUNCTION(MaybeObject*, ArrayConstructor_StubFailure) {
CONVERT_ARG_STUB_CALLER_ARGS(caller_args);
- // ASSERT(args.length() == 3);
- Handle<JSFunction> function = args.at<JSFunction>(1);
- Handle<Object> type_info = args.at<Object>(2);
+ ASSERT(args.length() == 2);
+ Handle<Object> type_info = args.at<Object>(1);
JSArray* array = NULL;
bool holey = false;
@@ -226,8 +225,7 @@
}
}
- ASSERT(function->has_initial_map());
- ElementsKind kind = function->initial_map()->elements_kind();
+ ElementsKind kind = GetInitialFastElementsKind();
if (holey) {
kind = GetHoleyElementsKind(kind);
}
@@ -934,7 +932,7 @@
if (start < kMinInt || start > kMaxInt) {
return CallJsBuiltin(isolate, "ArraySplice", args);
}
- relative_start = static_cast<int>(start);
+ relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
} else if (!arg1->IsUndefined()) {
return CallJsBuiltin(isolate, "ArraySplice", args);
}
@@ -1321,7 +1319,7 @@
v8::Handle<v8::Value> value;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate,
v8::ToCData<Address>(callback_obj));
value = callback(new_args);
@@ -1398,7 +1396,7 @@
v8::Handle<v8::Value> value;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate,
v8::ToCData<Address>(callback_obj));
value = callback(new_args);
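The ArraySplice hunk above guards the int conversion because NaN slips through the preceding range check (every comparison with NaN is false) and casting NaN to int is undefined behavior in C++. A minimal illustration of the patched behavior:

#include <cmath>
#include <cstdio>
#include <limits>

int RelativeStart(double start) {
  // Mirrors the patched ArraySplice line: map NaN to 0 instead of casting it.
  return std::isnan(start) ? 0 : static_cast<int>(start);
}

int main() {
  double nan = std::numeric_limits<double>::quiet_NaN();
  std::printf("%d\n", RelativeStart(3.7));  // 3
  std::printf("%d\n", RelativeStart(nan));  // 0; the unguarded cast is UB
  return 0;
}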
diff --git a/src/builtins.h b/src/builtins.h
index 12ed56a..ab77228 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -199,7 +199,7 @@
Code::kNoExtraICState) \
V(ArrayCode, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
- V(ArrayConstructCode, BUILTIN, UNINITIALIZED, \
+ V(CommonArrayConstructCode, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
\
V(StringConstructCode, BUILTIN, UNINITIALIZED, \
@@ -388,7 +388,7 @@
static void Generate_InternalArrayCode(MacroAssembler* masm);
static void Generate_ArrayCode(MacroAssembler* masm);
- static void Generate_ArrayConstructCode(MacroAssembler* masm);
+ static void Generate_CommonArrayConstructCode(MacroAssembler* masm);
static void Generate_StringConstructCode(MacroAssembler* masm);
static void Generate_OnStackReplacement(MacroAssembler* masm);
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index ee93a74..b672079 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -129,9 +129,10 @@
stack_parameter_count = new(zone) HParameter(param_count,
HParameter::REGISTER_PARAMETER,
Representation::Integer32());
+ stack_parameter_count->set_type(HType::Smi());
// it's essential to bind this value to the environment in case of deopt
- start_environment->Bind(param_count, stack_parameter_count);
AddInstruction(stack_parameter_count);
+ start_environment->Bind(param_count, stack_parameter_count);
arguments_length_ = stack_parameter_count;
} else {
ASSERT(descriptor_->environment_length() == param_count);
@@ -153,17 +154,26 @@
// arguments above
HInstruction* stack_pop_count = stack_parameter_count;
if (descriptor_->function_mode_ == JS_FUNCTION_STUB_MODE) {
- HInstruction* amount = graph()->GetConstant1();
- stack_pop_count = AddInstruction(
- HAdd::New(zone, context_, stack_parameter_count, amount));
- stack_pop_count->ChangeRepresentation(Representation::Integer32());
- stack_pop_count->ClearFlag(HValue::kCanOverflow);
+ if (!stack_parameter_count->IsConstant() &&
+ descriptor_->hint_stack_parameter_count_ < 0) {
+ HInstruction* amount = graph()->GetConstant1();
+ stack_pop_count = AddInstruction(
+ HAdd::New(zone, context_, stack_parameter_count, amount));
+ stack_pop_count->ChangeRepresentation(Representation::Integer32());
+ stack_pop_count->ClearFlag(HValue::kCanOverflow);
+ } else {
+ int count = descriptor_->hint_stack_parameter_count_;
+ stack_pop_count = AddInstruction(new(zone)
+ HConstant(count, Representation::Integer32()));
+ }
}
- HReturn* hreturn_instruction = new(zone) HReturn(return_value,
- context_,
- stack_pop_count);
- current_block()->Finish(hreturn_instruction);
+ if (!current_block()->IsFinished()) {
+ HReturn* hreturn_instruction = new(zone) HReturn(return_value,
+ context_,
+ stack_pop_count);
+ current_block()->Finish(hreturn_instruction);
+ }
return true;
}
@@ -477,10 +487,18 @@
template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
- HInstruction* deopt = new(zone()) HSoftDeoptimize();
- AddInstruction(deopt);
- current_block()->MarkAsDeoptimizing();
- return GetParameter(0);
+ // ----------- S t a t e -------------
+ // -- Parameter 1 : type info cell
+ // -- Parameter 0 : constructor
+ // -----------------------------------
+ // Get the right map
+ // Should be a constant
+ JSArrayBuilder array_builder(
+ this,
+ casted_stub()->elements_kind(),
+ GetParameter(ArrayConstructorStubBase::kPropertyCell),
+ casted_stub()->mode());
+ return array_builder.AllocateEmptyArray();
}
@@ -492,10 +510,49 @@
template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
BuildCodeStub() {
- HInstruction* deopt = new(zone()) HSoftDeoptimize();
- AddInstruction(deopt);
- current_block()->MarkAsDeoptimizing();
- return GetParameter(0);
+ // Smi check and range check on the input arg.
+ HValue* constant_one = graph()->GetConstant1();
+ HValue* constant_zero = graph()->GetConstant0();
+
+ HInstruction* elements = AddInstruction(
+ new(zone()) HArgumentsElements(false));
+ HInstruction* argument = AddInstruction(
+ new(zone()) HAccessArgumentsAt(elements, constant_one, constant_zero));
+
+ HConstant* max_alloc_length =
+ new(zone()) HConstant(JSObject::kInitialMaxFastElementArray,
+ Representation::Tagged());
+ AddInstruction(max_alloc_length);
+ const int initial_capacity = JSArray::kPreallocatedArrayElements;
+ HConstant* initial_capacity_node =
+ new(zone()) HConstant(initial_capacity, Representation::Tagged());
+ AddInstruction(initial_capacity_node);
+
+ // Since we're forcing Integer32 representation for this HBoundsCheck,
+ // there's no need to Smi-check the index.
+ HBoundsCheck* checked_arg = AddBoundsCheck(argument, max_alloc_length,
+ ALLOW_SMI_KEY,
+ Representation::Tagged());
+ IfBuilder if_builder(this);
+ if_builder.IfCompare(checked_arg, constant_zero, Token::EQ);
+ if_builder.Then();
+ Push(initial_capacity_node); // capacity
+ Push(constant_zero); // length
+ if_builder.Else();
+ Push(checked_arg); // capacity
+ Push(checked_arg); // length
+ if_builder.End();
+
+ // Figure out total size
+ HValue* length = Pop();
+ HValue* capacity = Pop();
+
+ JSArrayBuilder array_builder(
+ this,
+ casted_stub()->elements_kind(),
+ GetParameter(ArrayConstructorStubBase::kPropertyCell),
+ casted_stub()->mode());
+ return array_builder.AllocateArray(capacity, length, true);
}
@@ -506,10 +563,46 @@
template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
- HInstruction* deopt = new(zone()) HSoftDeoptimize();
- AddInstruction(deopt);
- current_block()->MarkAsDeoptimizing();
- return GetParameter(0);
+ ElementsKind kind = casted_stub()->elements_kind();
+ HValue* length = GetArgumentsLength();
+
+ JSArrayBuilder array_builder(
+ this,
+ kind,
+ GetParameter(ArrayConstructorStubBase::kPropertyCell),
+ casted_stub()->mode());
+
+ // We need to fill with the hole if it's a smi array in the multi-argument
+ // case because we might have to bail out while copying arguments into
+ // the array because they aren't compatible with a smi array.
+ // If it's a double array, no problem, and if it's fast then no
+ // problem either because doubles are boxed.
+ bool fill_with_hole = IsFastSmiElementsKind(kind);
+ HValue* new_object = array_builder.AllocateArray(length,
+ length,
+ fill_with_hole);
+ HValue* elements = array_builder.GetElementsLocation();
+ ASSERT(elements != NULL);
+
+ // Now populate the elements correctly.
+ LoopBuilder builder(this,
+ context(),
+ LoopBuilder::kPostIncrement);
+ HValue* start = graph()->GetConstant0();
+ HValue* key = builder.BeginBody(start, length, Token::LT);
+ HInstruction* argument_elements = AddInstruction(
+ new(zone()) HArgumentsElements(false));
+ HInstruction* argument = AddInstruction(new(zone()) HAccessArgumentsAt(
+ argument_elements, length, key));
+
+ // Checks to prevent incompatible stores
+ if (IsFastSmiElementsKind(kind)) {
+ AddInstruction(new(zone()) HCheckSmi(argument));
+ }
+
+ AddInstruction(new(zone()) HStoreKeyed(elements, key, argument, kind));
+ builder.EndBody();
+ return new_object;
}
@@ -517,4 +610,30 @@
return DoGenerateCode(this);
}
+
+template <>
+HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeUninitializedStub() {
+ CompareNilICStub* stub = casted_stub();
+ HIfContinuation continuation;
+ Handle<Map> sentinel_map(graph()->isolate()->heap()->meta_map());
+ BuildCompareNil(GetParameter(0), stub->GetKind(),
+ stub->GetTypes(), sentinel_map,
+ RelocInfo::kNoPosition, &continuation);
+ IfBuilder if_nil(this, &continuation);
+ if_nil.Then();
+ if (continuation.IsFalseReachable()) {
+ if_nil.Else();
+ if_nil.Return(graph()->GetConstantSmi0());
+ }
+ if_nil.End();
+ return continuation.IsTrueReachable()
+ ? graph()->GetConstantSmi1()
+ : graph()->GetConstantUndefined();
+}
+
+
+Handle<Code> CompareNilICStub::GenerateCode() {
+ return DoGenerateCode(this);
+}
+
} } // namespace v8::internal
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 3a4243d..df9855d 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -41,6 +41,7 @@
CodeStubInterfaceDescriptor::CodeStubInterfaceDescriptor()
: register_param_count_(-1),
stack_parameter_count_(NULL),
+ hint_stack_parameter_count_(-1),
function_mode_(NOT_JS_FUNCTION_STUB_MODE),
register_params_(NULL),
deoptimization_handler_(NULL),
@@ -407,6 +408,42 @@
}
+CompareNilICStub::Types CompareNilICStub::GetPatchedICFlags(
+ Code::ExtraICState extra_ic_state,
+ Handle<Object> object,
+ bool* already_monomorphic) {
+ Types types = TypesField::decode(extra_ic_state);
+ NilValue nil = NilValueField::decode(extra_ic_state);
+ EqualityKind kind = EqualityKindField::decode(extra_ic_state);
+ ASSERT(types != CompareNilICStub::kFullCompare);
+ *already_monomorphic =
+ (types & CompareNilICStub::kCompareAgainstMonomorphicMap) != 0;
+ if (kind == kStrictEquality) {
+ if (nil == kNullValue) {
+ return CompareNilICStub::kCompareAgainstNull;
+ } else {
+ return CompareNilICStub::kCompareAgainstUndefined;
+ }
+ } else {
+ if (object->IsNull()) {
+ types = static_cast<CompareNilICStub::Types>(
+ types | CompareNilICStub::kCompareAgainstNull);
+ } else if (object->IsUndefined()) {
+ types = static_cast<CompareNilICStub::Types>(
+ types | CompareNilICStub::kCompareAgainstUndefined);
+ } else if (object->IsUndetectableObject() || !object->IsHeapObject()) {
+ types = CompareNilICStub::kFullCompare;
+ } else if ((types & CompareNilICStub::kCompareAgainstMonomorphicMap) != 0) {
+ types = CompareNilICStub::kFullCompare;
+ } else {
+ types = static_cast<CompareNilICStub::Types>(
+ types | CompareNilICStub::kCompareAgainstMonomorphicMap);
+ }
+ }
+ return types;
+}
+
+
void InstanceofStub::PrintName(StringStream* stream) {
const char* args = "";
if (HasArgsInRegisters()) {
@@ -657,4 +694,45 @@
}
+static void InstallDescriptor(Isolate* isolate, HydrogenCodeStub* stub) {
+ int major_key = stub->MajorKey();
+ CodeStubInterfaceDescriptor* descriptor =
+ isolate->code_stub_interface_descriptor(major_key);
+ if (!descriptor->initialized()) {
+ stub->InitializeInterfaceDescriptor(isolate, descriptor);
+ }
+}
+
+
+void ArrayConstructorStubBase::InstallDescriptors(Isolate* isolate) {
+ ArrayNoArgumentConstructorStub stub1(GetInitialFastElementsKind());
+ InstallDescriptor(isolate, &stub1);
+ ArraySingleArgumentConstructorStub stub2(GetInitialFastElementsKind());
+ InstallDescriptor(isolate, &stub2);
+ ArrayNArgumentsConstructorStub stub3(GetInitialFastElementsKind());
+ InstallDescriptor(isolate, &stub3);
+}
+
+
+ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate)
+ : argument_count_(ANY) {
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+}
+
+
+ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate,
+ int argument_count) {
+ if (argument_count == 0) {
+ argument_count_ = NONE;
+ } else if (argument_count == 1) {
+ argument_count_ = ONE;
+ } else if (argument_count >= 2) {
+ argument_count_ = MORE_THAN_ONE;
+ } else {
+ UNREACHABLE();
+ }
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+}
+
+
} } // namespace v8::internal
diff --git a/src/code-stubs.h b/src/code-stubs.h
index 55d7e5d..ea895d6 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -47,6 +47,7 @@
V(StringCompare) \
V(Compare) \
V(CompareIC) \
+ V(CompareNilIC) \
V(MathPow) \
V(StringLength) \
V(FunctionPrototype) \
@@ -83,6 +84,7 @@
V(TransitionElementsKind) \
V(StoreArrayLiteralElement) \
V(StubFailureTrampoline) \
+ V(ArrayConstructor) \
V(ProfileEntryHook) \
/* IC Handler stubs */ \
V(LoadField)
@@ -266,6 +268,9 @@
CodeStubInterfaceDescriptor();
int register_param_count_;
const Register* stack_parameter_count_;
+ // If hint_stack_parameter_count_ > 0, the code stub can optimize the
+ // return sequence. Default value is -1, which means it is ignored.
+ int hint_stack_parameter_count_;
StubFunctionMode function_mode_;
Register* register_params_;
Address deoptimization_handler_;
@@ -277,8 +282,17 @@
}
return register_param_count_;
}
+
+ bool initialized() const { return register_param_count_ >= 0; }
};
+// A helper to make up for the fact that type Register is not fully
+// defined outside of the platform directories
+#define DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index) \
+ ((index) == (descriptor)->register_param_count_) \
+ ? *((descriptor)->stack_parameter_count_) \
+ : (descriptor)->register_params_[(index)]
+
class HydrogenCodeStub : public CodeStub {
public:
@@ -622,6 +636,22 @@
};
+class ArrayConstructorStub: public PlatformCodeStub {
+ public:
+ enum ArgumentCountKey { ANY, NONE, ONE, MORE_THAN_ONE };
+ ArrayConstructorStub(Isolate* isolate, int argument_count);
+ explicit ArrayConstructorStub(Isolate* isolate);
+
+ void Generate(MacroAssembler* masm);
+
+ private:
+ virtual CodeStub::Major MajorKey() { return ArrayConstructor; }
+ virtual int MinorKey() { return argument_count_; }
+
+ ArgumentCountKey argument_count_;
+};
+
+
class MathPowStub: public PlatformCodeStub {
public:
enum ExponentType { INTEGER, DOUBLE, TAGGED, ON_STACK};
@@ -946,6 +976,102 @@
};
+class CompareNilICStub : public HydrogenCodeStub {
+ public:
+ enum Types {
+ kCompareAgainstNull = 1 << 0,
+ kCompareAgainstUndefined = 1 << 1,
+ kCompareAgainstMonomorphicMap = 1 << 2,
+ kCompareAgainstUndetectable = 1 << 3,
+ kFullCompare = kCompareAgainstNull | kCompareAgainstUndefined |
+ kCompareAgainstUndetectable
+ };
+
+ CompareNilICStub(EqualityKind kind, NilValue nil, Types types)
+ : HydrogenCodeStub(CODE_STUB_IS_NOT_MISS), bit_field_(0) {
+ bit_field_ = EqualityKindField::encode(kind) |
+ NilValueField::encode(nil) |
+ TypesField::encode(types);
+ }
+
+ virtual InlineCacheState GetICState() {
+ Types types = GetTypes();
+ if (types == kFullCompare) {
+ return MEGAMORPHIC;
+ } else if ((types & kCompareAgainstMonomorphicMap) != 0) {
+ return MONOMORPHIC;
+ } else {
+ return PREMONOMORPHIC;
+ }
+ }
+
+ virtual Code::Kind GetCodeKind() const { return Code::COMPARE_NIL_IC; }
+
+ Handle<Code> GenerateCode();
+
+ static Handle<Code> GetUninitialized(Isolate* isolate,
+ EqualityKind kind,
+ NilValue nil) {
+ return CompareNilICStub(kind, nil).GetCode(isolate);
+ }
+
+ virtual void InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor);
+
+ static void InitializeForIsolate(Isolate* isolate) {
+ CompareNilICStub compare_stub(kStrictEquality, kNullValue);
+ compare_stub.InitializeInterfaceDescriptor(
+ isolate,
+ isolate->code_stub_interface_descriptor(CodeStub::CompareNilIC));
+ }
+
+ virtual Code::ExtraICState GetExtraICState() {
+ return bit_field_;
+ }
+
+ EqualityKind GetKind() { return EqualityKindField::decode(bit_field_); }
+ NilValue GetNilValue() { return NilValueField::decode(bit_field_); }
+ Types GetTypes() { return TypesField::decode(bit_field_); }
+
+ static Types TypesFromExtraICState(
+ Code::ExtraICState state) {
+ return TypesField::decode(state);
+ }
+ static EqualityKind EqualityKindFromExtraICState(
+ Code::ExtraICState state) {
+ return EqualityKindField::decode(state);
+ }
+ static NilValue NilValueFromExtraICState(Code::ExtraICState state) {
+ return NilValueField::decode(state);
+ }
+
+ static Types GetPatchedICFlags(Code::ExtraICState extra_ic_state,
+ Handle<Object> object,
+ bool* already_monomorphic);
+
+ private:
+ friend class CompareNilIC;
+
+ class EqualityKindField : public BitField<EqualityKind, 0, 1> {};
+ class NilValueField : public BitField<NilValue, 1, 1> {};
+ class TypesField : public BitField<Types, 3, 4> {};
+
+ CompareNilICStub(EqualityKind kind, NilValue nil)
+ : HydrogenCodeStub(CODE_STUB_IS_MISS), bit_field_(0) {
+ bit_field_ = EqualityKindField::encode(kind) |
+ NilValueField::encode(nil);
+ }
+
+ virtual CodeStub::Major MajorKey() { return CompareNilIC; }
+ virtual int NotMissMinorKey() { return bit_field_; }
+
+ int bit_field_;
+
+ DISALLOW_COPY_AND_ASSIGN(CompareNilICStub);
+};
+
+
class CEntryStub : public PlatformCodeStub {
public:
explicit CEntryStub(int result_size,
@@ -1447,10 +1573,48 @@
};
-class ArrayNoArgumentConstructorStub : public HydrogenCodeStub {
+class ArrayConstructorStubBase : public HydrogenCodeStub {
public:
- ArrayNoArgumentConstructorStub()
+ ArrayConstructorStubBase(ElementsKind kind, AllocationSiteMode mode)
: HydrogenCodeStub(CODE_STUB_IS_NOT_MISS) {
+ bit_field_ = ElementsKindBits::encode(kind) |
+ AllocationSiteModeBits::encode(mode == TRACK_ALLOCATION_SITE);
+ }
+
+ ElementsKind elements_kind() const {
+ return ElementsKindBits::decode(bit_field_);
+ }
+
+ AllocationSiteMode mode() const {
+ return AllocationSiteModeBits::decode(bit_field_)
+ ? TRACK_ALLOCATION_SITE
+ : DONT_TRACK_ALLOCATION_SITE;
+ }
+
+ virtual bool IsPregenerated() { return true; }
+ static void GenerateStubsAheadOfTime(Isolate* isolate);
+ static void InstallDescriptors(Isolate* isolate);
+
+ // Parameters accessed via CodeStubGraphBuilder::GetParameter()
+ static const int kPropertyCell = 0;
+
+ private:
+ int NotMissMinorKey() { return bit_field_; }
+
+ class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
+ class AllocationSiteModeBits: public BitField<bool, 8, 1> {};
+ uint32_t bit_field_;
+
+ DISALLOW_COPY_AND_ASSIGN(ArrayConstructorStubBase);
+};
+
+
+class ArrayNoArgumentConstructorStub : public ArrayConstructorStubBase {
+ public:
+ ArrayNoArgumentConstructorStub(
+ ElementsKind kind,
+ AllocationSiteMode mode = TRACK_ALLOCATION_SITE)
+ : ArrayConstructorStubBase(kind, mode) {
}
virtual Handle<Code> GenerateCode();
@@ -1461,16 +1625,18 @@
private:
Major MajorKey() { return ArrayNoArgumentConstructor; }
- int NotMissMinorKey() { return 0; }
DISALLOW_COPY_AND_ASSIGN(ArrayNoArgumentConstructorStub);
};
-class ArraySingleArgumentConstructorStub : public HydrogenCodeStub {
+class ArraySingleArgumentConstructorStub : public ArrayConstructorStubBase {
public:
- ArraySingleArgumentConstructorStub()
- : HydrogenCodeStub(CODE_STUB_IS_NOT_MISS) {}
+ ArraySingleArgumentConstructorStub(
+ ElementsKind kind,
+ AllocationSiteMode mode = TRACK_ALLOCATION_SITE)
+ : ArrayConstructorStubBase(kind, mode) {
+ }
virtual Handle<Code> GenerateCode();
@@ -1480,16 +1646,18 @@
private:
Major MajorKey() { return ArraySingleArgumentConstructor; }
- int NotMissMinorKey() { return 0; }
DISALLOW_COPY_AND_ASSIGN(ArraySingleArgumentConstructorStub);
};
-class ArrayNArgumentsConstructorStub : public HydrogenCodeStub {
+class ArrayNArgumentsConstructorStub : public ArrayConstructorStubBase {
public:
- ArrayNArgumentsConstructorStub()
- : HydrogenCodeStub(CODE_STUB_IS_NOT_MISS) {}
+ ArrayNArgumentsConstructorStub(
+ ElementsKind kind,
+ AllocationSiteMode mode = TRACK_ALLOCATION_SITE) :
+ ArrayConstructorStubBase(kind, mode) {
+ }
virtual Handle<Code> GenerateCode();
@@ -1499,7 +1667,6 @@
private:
Major MajorKey() { return ArrayNArgumentsConstructor; }
- int NotMissMinorKey() { return 0; }
DISALLOW_COPY_AND_ASSIGN(ArrayNArgumentsConstructorStub);
};
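ArrayConstructorStubBase and CompareNilICStub above pack their configuration into a single minor key with V8's BitField templates. A self-contained sketch of that shift-and-mask pattern (the real BitField in src/utils.h adds static bounds checks):

#include <cassert>
#include <cstdint>

template <class T, int shift, int size>
struct BitFieldSketch {
  static const uint32_t kMask = ((1u << size) - 1u) << shift;
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << shift;
  }
  static T decode(uint32_t field) {
    return static_cast<T>((field & kMask) >> shift);
  }
};

enum ElementsKind { FAST_SMI_ELEMENTS = 0, FAST_HOLEY_SMI_ELEMENTS = 1 };
typedef BitFieldSketch<ElementsKind, 0, 8> ElementsKindBits;
typedef BitFieldSketch<bool, 8, 1> AllocationSiteModeBits;

int main() {
  uint32_t bits = ElementsKindBits::encode(FAST_HOLEY_SMI_ELEMENTS) |
                  AllocationSiteModeBits::encode(true);
  assert(ElementsKindBits::decode(bits) == FAST_HOLEY_SMI_ELEMENTS);
  assert(AllocationSiteModeBits::decode(bits) == true);
  return 0;
}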
diff --git a/src/compiler.cc b/src/compiler.cc
index 89ced41..dce8171 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -125,11 +125,8 @@
int CompilationInfo::num_parameters() const {
- if (IsStub()) {
- return 0;
- } else {
- return scope()->num_parameters();
- }
+ ASSERT(!IsStub());
+ return scope()->num_parameters();
}
@@ -147,8 +144,7 @@
return Code::ComputeFlags(code_stub()->GetCodeKind(),
code_stub()->GetICState(),
code_stub()->GetExtraICState(),
- Code::NORMAL,
- 0);
+ Code::NORMAL, -1);
} else {
return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
}
@@ -628,7 +624,7 @@
isolate->counters()->total_compile_size()->Increment(source_length);
// The VM is in the COMPILER state until exiting this function.
- VMState state(isolate, COMPILER);
+ VMState<COMPILER> state(isolate);
CompilationCache* compilation_cache = isolate->compilation_cache();
@@ -702,7 +698,7 @@
isolate->counters()->total_compile_size()->Increment(source_length);
// The VM is in the COMPILER state until exiting this function.
- VMState state(isolate, COMPILER);
+ VMState<COMPILER> state(isolate);
// Do a lookup in the compilation cache; if the entry is not there, invoke
// the compiler and add the result to the cache.
@@ -865,7 +861,7 @@
ZoneScope zone_scope(info->zone(), DELETE_ON_EXIT);
// The VM is in the COMPILER state until exiting this function.
- VMState state(isolate, COMPILER);
+ VMState<COMPILER> state(isolate);
PostponeInterruptsScope postpone(isolate);
@@ -929,7 +925,7 @@
}
SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(closure));
- VMState state(isolate, PARALLEL_COMPILER);
+ VMState<COMPILER> state(isolate);
PostponeInterruptsScope postpone(isolate);
Handle<SharedFunctionInfo> shared = info->shared_info();
@@ -1004,7 +1000,7 @@
}
Isolate* isolate = info->isolate();
- VMState state(isolate, PARALLEL_COMPILER);
+ VMState<COMPILER> state(isolate);
Logger::TimerEventScope timer(
isolate, Logger::TimerEventScope::v8_recompile_synchronous);
// If crankshaft succeeded, install the optimized code else install
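
The VMState changes above move the state tag from a constructor argument to a template parameter, so the tag is fixed at compile time and the per-scope bookkeeping can be specialized per tag (note also that the two PARALLEL_COMPILER sites now use plain COMPILER). A minimal standalone analogue of the pattern, not V8's actual class:

    enum StateTag { JS, GC, COMPILER, OTHER, EXTERNAL };

    struct Isolate;  // opaque placeholder for v8::internal::Isolate

    template <StateTag Tag>
    class VMState {
     public:
      explicit VMState(Isolate* isolate) : isolate_(isolate) {
        // The real class would record the previous state and enter Tag here.
      }
      ~VMState() {
        // ... and restore the previous state here.
      }
     private:
      Isolate* isolate_;
    };

    int main() {
      Isolate* isolate = 0;  // placeholder only
      VMState<COMPILER> state(isolate);  // scoped, like the call sites above
      return 0;
    }
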
diff --git a/src/contexts.h b/src/contexts.h
index abeb812..0024e13 100644
--- a/src/contexts.h
+++ b/src/contexts.h
@@ -123,6 +123,7 @@
V(GLOBAL_EVAL_FUN_INDEX, JSFunction, global_eval_fun) \
V(INSTANTIATE_FUN_INDEX, JSFunction, instantiate_fun) \
V(CONFIGURE_INSTANCE_FUN_INDEX, JSFunction, configure_instance_fun) \
+ V(ARRAY_BUFFER_FUN_INDEX, JSFunction, array_buffer_fun) \
V(FUNCTION_MAP_INDEX, Map, function_map) \
V(STRICT_MODE_FUNCTION_MAP_INDEX, Map, strict_mode_function_map) \
V(FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX, Map, function_without_prototype_map) \
@@ -276,6 +277,7 @@
GLOBAL_EVAL_FUN_INDEX,
INSTANTIATE_FUN_INDEX,
CONFIGURE_INSTANCE_FUN_INDEX,
+ ARRAY_BUFFER_FUN_INDEX,
MESSAGE_LISTENERS_INDEX,
MAKE_MESSAGE_FUN_INDEX,
GET_STACK_TRACE_LINE_INDEX,
diff --git a/src/counters.cc b/src/counters.cc
index 7c8265e..fa192ba 100644
--- a/src/counters.cc
+++ b/src/counters.cc
@@ -45,57 +45,38 @@
}
-// Start the timer.
-void StatsCounterTimer::Start() {
- if (!counter_.Enabled())
- return;
- stop_time_ = 0;
- start_time_ = OS::Ticks();
-}
-
-// Stop the timer and record the results.
-void StatsCounterTimer::Stop() {
- if (!counter_.Enabled())
- return;
- stop_time_ = OS::Ticks();
-
- // Compute the delta between start and stop, in milliseconds.
- int milliseconds = static_cast<int>(stop_time_ - start_time_) / 1000;
- counter_.Increment(milliseconds);
-}
-
void Histogram::AddSample(int sample) {
if (Enabled()) {
- Isolate::Current()->stats_table()->AddHistogramSample(histogram_, sample);
+ isolate()->stats_table()->AddHistogramSample(histogram_, sample);
}
}
void* Histogram::CreateHistogram() const {
- return Isolate::Current()->stats_table()->
+ return isolate()->stats_table()->
CreateHistogram(name_, min_, max_, num_buckets_);
}
// Start the timer.
void HistogramTimer::Start() {
- if (histogram_.Enabled()) {
+ if (Enabled()) {
stop_time_ = 0;
start_time_ = OS::Ticks();
}
if (FLAG_log_internal_timer_events) {
- LOG(Isolate::Current(), TimerEvent(Logger::START, histogram_.name_));
+ LOG(isolate(), TimerEvent(Logger::START, name()));
}
}
// Stop the timer and record the results.
void HistogramTimer::Stop() {
- if (histogram_.Enabled()) {
+ if (Enabled()) {
stop_time_ = OS::Ticks();
// Compute the delta between start and stop, in milliseconds.
int milliseconds = static_cast<int>(stop_time_ - start_time_) / 1000;
- histogram_.AddSample(milliseconds);
+ AddSample(milliseconds);
}
if (FLAG_log_internal_timer_events) {
- LOG(Isolate::Current(), TimerEvent(Logger::END, histogram_.name_));
+ LOG(isolate(), TimerEvent(Logger::END, name()));
}
}
diff --git a/src/counters.h b/src/counters.h
index 577280f..a633fea 100644
--- a/src/counters.h
+++ b/src/counters.h
@@ -113,14 +113,11 @@
// The row has a 32bit value for each process/thread in the table and also
// a name (stored in the table metadata). Since the storage location can be
// thread-specific, this class cannot be shared across threads.
-//
-// This class is designed to be POD initialized. It will be registered with
-// the counter system on first use. For example:
-// StatsCounter c = { "c:myctr", NULL, false };
-struct StatsCounter {
- const char* name_;
- int* ptr_;
- bool lookup_done_;
+class StatsCounter {
+ public:
+ StatsCounter() { }
+ explicit StatsCounter(const char* name)
+ : name_(name), ptr_(NULL), lookup_done_(false) { }
// Sets the counter to a specific value.
void Set(int value) {
@@ -177,39 +174,29 @@
private:
int* FindLocationInStatsTable() const;
-};
-// StatsCounterTimer t = { { L"t:foo", NULL, false }, 0, 0 };
-struct StatsCounterTimer {
- StatsCounter counter_;
-
- int64_t start_time_;
- int64_t stop_time_;
-
- // Start the timer.
- void Start();
-
- // Stop the timer and record the results.
- void Stop();
-
- // Returns true if the timer is running.
- bool Running() {
- return counter_.Enabled() && start_time_ != 0 && stop_time_ == 0;
- }
+ const char* name_;
+ int* ptr_;
+ bool lookup_done_;
};
// A Histogram represents a dynamically created histogram in the StatsTable.
-//
-// This class is designed to be POD initialized. It will be registered with
-// the histogram system on first use. For example:
-// Histogram h = { "myhist", 0, 10000, 50, NULL, false };
-struct Histogram {
- const char* name_;
- int min_;
- int max_;
- int num_buckets_;
- void* histogram_;
- bool lookup_done_;
+// It will be registered with the histogram system on first use.
+class Histogram {
+ public:
+ Histogram() { }
+ Histogram(const char* name,
+ int min,
+ int max,
+ int num_buckets,
+ Isolate* isolate)
+ : name_(name),
+ min_(min),
+ max_(max),
+ num_buckets_(num_buckets),
+ histogram_(NULL),
+ lookup_done_(false),
+ isolate_(isolate) { }
// Add a single sample to this histogram.
void AddSample(int sample);
@@ -234,17 +221,33 @@
return histogram_;
}
+ const char* name() { return name_; }
+ Isolate* isolate() const { return isolate_; }
+
private:
void* CreateHistogram() const;
+
+ const char* name_;
+ int min_;
+ int max_;
+ int num_buckets_;
+ void* histogram_;
+ bool lookup_done_;
+ Isolate* isolate_;
};
-// A HistogramTimer allows distributions of results to be created
-// HistogramTimer t = { {L"foo", 0, 10000, 50, NULL, false}, 0, 0 };
-struct HistogramTimer {
- Histogram histogram_;
-
- int64_t start_time_;
- int64_t stop_time_;
+// A HistogramTimer allows distributions of results to be created.
+class HistogramTimer : public Histogram {
+ public:
+ HistogramTimer() { }
+ HistogramTimer(const char* name,
+ int min,
+ int max,
+ int num_buckets,
+ Isolate* isolate)
+ : Histogram(name, min, max, num_buckets, isolate),
+ start_time_(0),
+ stop_time_(0) { }
// Start the timer.
void Start();
@@ -254,12 +257,12 @@
// Returns true if the timer is running.
bool Running() {
- return histogram_.Enabled() && (start_time_ != 0) && (stop_time_ == 0);
+ return Enabled() && (start_time_ != 0) && (stop_time_ == 0);
}
- void Reset() {
- histogram_.Reset();
- }
+ private:
+ int64_t start_time_;
+ int64_t stop_time_;
};
// Helper class for scoping a HistogramTimer.
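
With Histogram and HistogramTimer turned from POD structs into classes bound to an Isolate, a timer is now constructed explicitly rather than brace-initialized. A minimal usage sketch, assuming an initialized Isolate* named isolate (the counter name is hypothetical):

    HistogramTimer timer("V8.ExamplePhase", 0 /* min */, 10000 /* max */,
                         50 /* buckets */, isolate);
    timer.Start();
    // ... timed work ...
    timer.Stop();  // records the elapsed milliseconds as a histogram sample
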
diff --git a/src/cpu-profiler.cc b/src/cpu-profiler.cc
index 47c2a94..51d2942 100644
--- a/src/cpu-profiler.cc
+++ b/src/cpu-profiler.cc
@@ -44,9 +44,11 @@
static const int kProfilerStackSize = 64 * KB;
-ProfilerEventsProcessor::ProfilerEventsProcessor(ProfileGenerator* generator)
+ProfilerEventsProcessor::ProfilerEventsProcessor(
+ ProfileGenerator* generator, CpuProfilesCollection* profiles)
: Thread(Thread::Options("v8:ProfEvntProc", kProfilerStackSize)),
generator_(generator),
+ profiles_(profiles),
running_(true),
ticks_buffer_(sizeof(TickSampleEventRecord),
kTickSamplesBufferChunkSize,
@@ -65,7 +67,7 @@
rec->type = CodeEventRecord::CODE_CREATION;
rec->order = ++enqueue_order_;
rec->start = start;
- rec->entry = generator_->NewCodeEntry(tag, prefix, name);
+ rec->entry = profiles_->NewCodeEntry(tag, prefix, name);
rec->size = 1;
rec->shared = NULL;
events_buffer_.Enqueue(evt_rec);
@@ -85,7 +87,7 @@
rec->type = CodeEventRecord::CODE_CREATION;
rec->order = ++enqueue_order_;
rec->start = start;
- rec->entry = generator_->NewCodeEntry(tag, name, resource_name, line_number);
+ rec->entry = profiles_->NewCodeEntry(tag, name, resource_name, line_number);
rec->size = size;
rec->shared = shared;
events_buffer_.Enqueue(evt_rec);
@@ -102,7 +104,7 @@
rec->type = CodeEventRecord::CODE_CREATION;
rec->order = ++enqueue_order_;
rec->start = start;
- rec->entry = generator_->NewCodeEntry(tag, name);
+ rec->entry = profiles_->NewCodeEntry(tag, name);
rec->size = size;
rec->shared = NULL;
events_buffer_.Enqueue(evt_rec);
@@ -119,7 +121,7 @@
rec->type = CodeEventRecord::CODE_CREATION;
rec->order = ++enqueue_order_;
rec->start = start;
- rec->entry = generator_->NewCodeEntry(tag, args_count);
+ rec->entry = profiles_->NewCodeEntry(tag, args_count);
rec->size = size;
rec->shared = NULL;
events_buffer_.Enqueue(evt_rec);
@@ -162,7 +164,7 @@
rec->type = CodeEventRecord::CODE_CREATION;
rec->order = ++enqueue_order_;
rec->start = start;
- rec->entry = generator_->NewCodeEntry(tag, prefix, name);
+ rec->entry = profiles_->NewCodeEntry(tag, prefix, name);
rec->size = size;
events_buffer_.Enqueue(evt_rec);
}
@@ -443,7 +445,7 @@
saved_logging_nesting_ = isolate_->logger()->logging_nesting_;
isolate_->logger()->logging_nesting_ = 0;
generator_ = new ProfileGenerator(profiles_);
- processor_ = new ProfilerEventsProcessor(generator_);
+ processor_ = new ProfilerEventsProcessor(generator_, profiles_);
is_profiling_ = true;
processor_->StartSynchronously();
// Enumerate stuff we already have in the heap.
@@ -458,7 +460,7 @@
isolate_->logger()->LogAccessorCallbacks();
}
// Enable stack sampling.
- Sampler* sampler = reinterpret_cast<Sampler*>(isolate_->logger()->ticker_);
+ Sampler* sampler = isolate_->logger()->sampler();
sampler->IncreaseProfilingDepth();
if (!sampler->IsActive()) {
sampler->Start();
diff --git a/src/cpu-profiler.h b/src/cpu-profiler.h
index 6e2b0e0..da7ea6d 100644
--- a/src/cpu-profiler.h
+++ b/src/cpu-profiler.h
@@ -125,7 +125,8 @@
// methods called by event producers: VM and stack sampler threads.
class ProfilerEventsProcessor : public Thread {
public:
- explicit ProfilerEventsProcessor(ProfileGenerator* generator);
+ ProfilerEventsProcessor(ProfileGenerator* generator,
+ CpuProfilesCollection* profiles);
virtual ~ProfilerEventsProcessor() {}
// Thread control.
@@ -178,6 +179,7 @@
INLINE(static bool FilterOutCodeCreateEvent(Logger::LogEventsAndTags tag));
ProfileGenerator* generator_;
+ CpuProfilesCollection* profiles_;
bool running_;
UnboundQueue<CodeEventsContainer> events_buffer_;
SamplingCircularQueue ticks_buffer_;
diff --git a/src/d8.cc b/src/d8.cc
index fe9fdca..22ace17 100644
--- a/src/d8.cc
+++ b/src/d8.cc
@@ -42,6 +42,13 @@
#ifdef V8_SHARED
#include <assert.h>
+#endif // V8_SHARED
+
+#ifndef V8_SHARED
+#include <algorithm>
+#endif // !V8_SHARED
+
+#ifdef V8_SHARED
#include "../include/v8-testing.h"
#endif // V8_SHARED
@@ -1573,9 +1580,8 @@
};
-int CompareKeys(const void* a, const void* b) {
- return strcmp(static_cast<const CounterAndKey*>(a)->key,
- static_cast<const CounterAndKey*>(b)->key);
+inline bool operator<(const CounterAndKey& lhs, const CounterAndKey& rhs) {
+ return strcmp(lhs.key, rhs.key) < 0;
}
#endif // V8_SHARED
@@ -1595,7 +1601,7 @@
counters[j].counter = i.CurrentValue();
counters[j].key = i.CurrentKey();
}
- qsort(counters, number_of_counters, sizeof(counters[0]), CompareKeys);
+ std::sort(counters, counters + number_of_counters);
printf("+----------------------------------------------------------------+"
"-------------+\n");
printf("| Name |"
diff --git a/src/execution.cc b/src/execution.cc
index c67fbc2..6d8c3c1 100644
--- a/src/execution.cc
+++ b/src/execution.cc
@@ -76,7 +76,7 @@
Isolate* isolate = function->GetIsolate();
// Entering JavaScript.
- VMState state(isolate, JS);
+ VMState<JS> state(isolate);
// Placeholder for return value.
MaybeObject* value = reinterpret_cast<Object*>(kZapValue);
diff --git a/src/factory.cc b/src/factory.cc
index e668e27..f36006c 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -1046,6 +1046,16 @@
}
+Handle<JSArrayBuffer> Factory::NewJSArrayBuffer() {
+ JSFunction* array_buffer_fun =
+ isolate()->context()->native_context()->array_buffer_fun();
+ CALL_HEAP_FUNCTION(
+ isolate(),
+ isolate()->heap()->AllocateJSObject(array_buffer_fun),
+ JSArrayBuffer);
+}
+
+
Handle<JSProxy> Factory::NewJSProxy(Handle<Object> handler,
Handle<Object> prototype) {
CALL_HEAP_FUNCTION(
diff --git a/src/factory.h b/src/factory.h
index b6bfa8a..caac78d 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -313,6 +313,8 @@
uint32_t length,
EnsureElementsMode mode);
+ Handle<JSArrayBuffer> NewJSArrayBuffer();
+
Handle<JSProxy> NewJSProxy(Handle<Object> handler, Handle<Object> prototype);
// Change the type of the argument into a JS object/function and reinitialize.
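
Factory::NewJSArrayBuffer gives runtime code a way to allocate the JSArrayBuffer instance through the native context's constructor. A hedged sketch of a call site, assuming an initialized isolate and handle scope as elsewhere in the runtime:

    Isolate* isolate = Isolate::Current();
    HandleScope scope(isolate);
    Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
    // The caller still has to attach a backing store and set the byte
    // length before the object is handed to script.
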
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 7d905bf..0a6bf67 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -673,9 +673,6 @@
DEFINE_bool(trace_isolates, false, "trace isolate state changes")
-// VM state
-DEFINE_bool(log_state_changes, false, "Log state changes.")
-
// Regexp
DEFINE_bool(regexp_possessive_quantifier,
false,
@@ -723,6 +720,7 @@
DEFINE_bool(log_timer_events, false,
"Time events including external callbacks.")
DEFINE_implication(log_timer_events, log_internal_timer_events)
+DEFINE_implication(log_internal_timer_events, prof)
//
// Disassembler only flags
diff --git a/src/full-codegen.cc b/src/full-codegen.cc
index b73ceed..dc646b1 100644
--- a/src/full-codegen.cc
+++ b/src/full-codegen.cc
@@ -923,6 +923,20 @@
}
+void FullCodeGenerator::EmitGeneratorSend(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 2);
+ EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::SEND);
+}
+
+
+void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 2);
+ EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
+}
+
+
void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
switch (expr->op()) {
case Token::COMMA:
@@ -1241,9 +1255,12 @@
__ CallRuntime(Runtime::kPushWithContext, 2);
StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
+ Scope* saved_scope = scope();
+ scope_ = stmt->scope();
{ WithOrCatch body(this);
Visit(stmt->statement());
}
+ scope_ = saved_scope;
// Pop context.
LoadContextField(context_register(), Context::PREVIOUS_INDEX);
diff --git a/src/full-codegen.h b/src/full-codegen.h
index b9647c2..3734ae5 100644
--- a/src/full-codegen.h
+++ b/src/full-codegen.h
@@ -486,6 +486,11 @@
INLINE_RUNTIME_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
#undef EMIT_INLINE_RUNTIME_CALL
+ // Platform-specific code for resuming generators.
+ void EmitGeneratorResume(Expression* generator,
+ Expression* value,
+ JSGeneratorObject::ResumeMode resume_mode);
+
// Platform-specific code for loading variables.
void EmitLoadGlobalCheckExtensions(Variable* var,
TypeofState typeof_state,
diff --git a/src/generator.js b/src/generator.js
index 481d4d3..5e61091 100644
--- a/src/generator.js
+++ b/src/generator.js
@@ -44,7 +44,7 @@
['[Generator].prototype.next', this]);
}
- // TODO(wingo): Implement.
+ return %_GeneratorSend(this, void 0);
}
function GeneratorObjectSend(value) {
@@ -53,7 +53,7 @@
['[Generator].prototype.send', this]);
}
- // TODO(wingo): Implement.
+ return %_GeneratorSend(this, value);
}
function GeneratorObjectThrow(exn) {
@@ -62,16 +62,7 @@
['[Generator].prototype.throw', this]);
}
- // TODO(wingo): Implement.
-}
-
-function GeneratorObjectClose() {
- if (!IS_GENERATOR(this)) {
- throw MakeTypeError('incompatible_method_receiver',
- ['[Generator].prototype.close', this]);
- }
-
- // TODO(wingo): Implement.
+ return %_GeneratorThrow(this, exn);
}
function SetUpGenerators() {
@@ -81,8 +72,7 @@
DONT_ENUM | DONT_DELETE | READ_ONLY,
["next", GeneratorObjectNext,
"send", GeneratorObjectSend,
- "throw", GeneratorObjectThrow,
- "close", GeneratorObjectClose]);
+ "throw", GeneratorObjectThrow]);
%SetProperty(GeneratorObjectPrototype, "constructor",
GeneratorFunctionPrototype, DONT_ENUM | DONT_DELETE | READ_ONLY);
%SetPrototype(GeneratorFunctionPrototype, $Function.prototype);
diff --git a/src/global-handles.cc b/src/global-handles.cc
index cb3115a..7ee89d7 100644
--- a/src/global-handles.cc
+++ b/src/global-handles.cc
@@ -37,7 +37,13 @@
ObjectGroup::~ObjectGroup() {
- if (info_ != NULL) info_->Dispose();
+ if (info != NULL) info->Dispose();
+ delete[] objects;
+}
+
+
+ImplicitRefGroup::~ImplicitRefGroup() {
+ delete[] children;
}
@@ -267,7 +273,7 @@
ASSERT(!object_->IsExternalTwoByteString() ||
ExternalTwoByteString::cast(object_)->resource() != NULL);
// Leaving V8.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
if (near_death_callback_ != NULL) {
if (IsWeakCallback::decode(flags_)) {
WeakReferenceCallback callback =
@@ -438,7 +444,8 @@
first_block_(NULL),
first_used_block_(NULL),
first_free_(NULL),
- post_gc_processing_count_(0) {}
+ post_gc_processing_count_(0),
+ object_group_connections_(kObjectGroupConnectionsCapacity) {}
GlobalHandles::~GlobalHandles() {
@@ -578,15 +585,16 @@
bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
WeakSlotCallbackWithHeap can_skip) {
+ ComputeObjectGroupsAndImplicitReferences();
int last = 0;
bool any_group_was_visited = false;
for (int i = 0; i < object_groups_.length(); i++) {
ObjectGroup* entry = object_groups_.at(i);
ASSERT(entry != NULL);
- Object*** objects = entry->objects_;
+ Object*** objects = entry->objects;
bool group_should_be_visited = false;
- for (size_t j = 0; j < entry->length_; j++) {
+ for (size_t j = 0; j < entry->length; j++) {
Object* object = *objects[j];
if (object->IsHeapObject()) {
if (!can_skip(isolate_->heap(), &object)) {
@@ -603,7 +611,7 @@
// An object in the group requires visiting, so iterate over all
// objects in the group.
- for (size_t j = 0; j < entry->length_; ++j) {
+ for (size_t j = 0; j < entry->length; ++j) {
Object* object = *objects[j];
if (object->IsHeapObject()) {
v->VisitPointer(&object);
@@ -613,7 +621,7 @@
// Once the entire group has been iterated over, set the object
// group to NULL so it won't be processed again.
- entry->Dispose();
+ delete entry;
object_groups_.at(i) = NULL;
}
object_groups_.Rewind(last);
@@ -824,7 +832,23 @@
if (info != NULL) info->Dispose();
return;
}
- object_groups_.Add(ObjectGroup::New(handles, length, info));
+ ObjectGroup* group = new ObjectGroup(length);
+ for (size_t i = 0; i < length; ++i)
+ group->objects[i] = handles[i];
+ group->info = info;
+ object_groups_.Add(group);
+}
+
+
+void GlobalHandles::SetObjectGroupId(Object** handle,
+ UniqueId id) {
+ object_group_connections_.Add(ObjectGroupConnection(id, handle));
+}
+
+
+void GlobalHandles::SetRetainedObjectInfo(UniqueId id,
+ RetainedObjectInfo* info) {
+ retainer_infos_.Add(ObjectGroupRetainerInfo(id, info));
}
@@ -838,23 +862,45 @@
}
#endif
if (length == 0) return;
- implicit_ref_groups_.Add(ImplicitRefGroup::New(parent, children, length));
+ ImplicitRefGroup* group = new ImplicitRefGroup(parent, length);
+ for (size_t i = 0; i < length; ++i)
+ group->children[i] = children[i];
+ implicit_ref_groups_.Add(group);
+}
+
+
+void GlobalHandles::SetReferenceFromGroup(UniqueId id, Object** child) {
+ ASSERT(!Node::FromLocation(child)->is_independent());
+ implicit_ref_connections_.Add(ObjectGroupConnection(id, child));
+}
+
+
+void GlobalHandles::SetReference(HeapObject** parent, Object** child) {
+ ASSERT(!Node::FromLocation(child)->is_independent());
+ ImplicitRefGroup* group = new ImplicitRefGroup(parent, 1);
+ group->children[0] = child;
+ implicit_ref_groups_.Add(group);
}
void GlobalHandles::RemoveObjectGroups() {
- for (int i = 0; i < object_groups_.length(); i++) {
- object_groups_.at(i)->Dispose();
- }
+ for (int i = 0; i < object_groups_.length(); i++)
+ delete object_groups_.at(i);
object_groups_.Clear();
+ for (int i = 0; i < retainer_infos_.length(); ++i)
+ retainer_infos_[i].info->Dispose();
+ retainer_infos_.Clear();
+ object_group_connections_.Clear();
+ object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
}
void GlobalHandles::RemoveImplicitRefGroups() {
for (int i = 0; i < implicit_ref_groups_.length(); i++) {
- implicit_ref_groups_.at(i)->Dispose();
+ delete implicit_ref_groups_.at(i);
}
implicit_ref_groups_.Clear();
+ implicit_ref_connections_.Clear();
}
@@ -863,4 +909,108 @@
}
+void GlobalHandles::ComputeObjectGroupsAndImplicitReferences() {
+ if (object_group_connections_.length() == 0) {
+ for (int i = 0; i < retainer_infos_.length(); ++i)
+ retainer_infos_[i].info->Dispose();
+ retainer_infos_.Clear();
+ implicit_ref_connections_.Clear();
+ return;
+ }
+
+ object_group_connections_.Sort();
+ retainer_infos_.Sort();
+ implicit_ref_connections_.Sort();
+
+ int info_index = 0; // For iterating retainer_infos_.
+ UniqueId current_group_id(0);
+ int current_group_start = 0;
+
+ int current_implicit_refs_start = 0;
+ int current_implicit_refs_end = 0;
+ for (int i = 0; i <= object_group_connections_.length(); ++i) {
+ if (i == 0)
+ current_group_id = object_group_connections_[i].id;
+ if (i == object_group_connections_.length() ||
+ current_group_id != object_group_connections_[i].id) {
+ // Group detected: objects at indices [current_group_start, i).
+
+ // Find out which implicit references are related to this group. (We want
+ // to ignore object groups which only have 1 object, but that object is
+ // needed as a representative object for the implicit reference group.)
+ while (current_implicit_refs_start < implicit_ref_connections_.length() &&
+ implicit_ref_connections_[current_implicit_refs_start].id <
+ current_group_id)
+ ++current_implicit_refs_start;
+ current_implicit_refs_end = current_implicit_refs_start;
+ while (current_implicit_refs_end < implicit_ref_connections_.length() &&
+ implicit_ref_connections_[current_implicit_refs_end].id ==
+ current_group_id)
+ ++current_implicit_refs_end;
+
+ if (current_implicit_refs_end > current_implicit_refs_start) {
+ // Find a representative object for the implicit references.
+ HeapObject** representative = NULL;
+ for (int j = current_group_start; j < i; ++j) {
+ Object** object = object_group_connections_[j].object;
+ if ((*object)->IsHeapObject()) {
+ representative = reinterpret_cast<HeapObject**>(object);
+ break;
+ }
+ }
+ if (representative) {
+ ImplicitRefGroup* group = new ImplicitRefGroup(
+ representative,
+ current_implicit_refs_end - current_implicit_refs_start);
+ for (int j = current_implicit_refs_start;
+ j < current_implicit_refs_end;
+ ++j) {
+ group->children[j - current_implicit_refs_start] =
+ implicit_ref_connections_[j].object;
+ }
+ implicit_ref_groups_.Add(group);
+ }
+ current_implicit_refs_start = current_implicit_refs_end;
+ }
+
+ // Find a RetainedObjectInfo for the group.
+ RetainedObjectInfo* info = NULL;
+ while (info_index < retainer_infos_.length() &&
+ retainer_infos_[info_index].id < current_group_id) {
+ retainer_infos_[info_index].info->Dispose();
+ ++info_index;
+ }
+ if (info_index < retainer_infos_.length() &&
+ retainer_infos_[info_index].id == current_group_id) {
+ // This object group has an associated ObjectGroupRetainerInfo.
+ info = retainer_infos_[info_index].info;
+ ++info_index;
+ }
+
+ // Ignore groups which only contain one object.
+ if (i > current_group_start + 1) {
+ ObjectGroup* group = new ObjectGroup(i - current_group_start);
+ for (int j = current_group_start; j < i; ++j) {
+ group->objects[j - current_group_start] =
+ object_group_connections_[j].object;
+ }
+ group->info = info;
+ object_groups_.Add(group);
+ } else if (info) {
+ info->Dispose();
+ }
+
+ if (i < object_group_connections_.length()) {
+ current_group_id = object_group_connections_[i].id;
+ current_group_start = i;
+ }
+ }
+ }
+ object_group_connections_.Clear();
+ object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
+ retainer_infos_.Clear();
+ implicit_ref_connections_.Clear();
+}
+
+
} } // namespace v8::internal
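
ComputeObjectGroupsAndImplicitReferences is a sort-and-sweep: the (id, handle) connections are sorted by id, then one linear pass emits each maximal run of equal ids as a group, pairing it with its RetainedObjectInfo and implicit references along the way. A standalone analogue of the control flow (ints stand in for Object** handles; the data is hypothetical):

    #include <algorithm>
    #include <cstdio>
    #include <utility>
    #include <vector>

    int main() {
      // (id, object) connections, in arbitrary insertion order.
      std::vector<std::pair<int, int> > connections;
      connections.push_back(std::make_pair(7, 100));
      connections.push_back(std::make_pair(3, 200));
      connections.push_back(std::make_pair(7, 300));

      std::sort(connections.begin(), connections.end());  // group by id

      for (size_t start = 0; start < connections.size();) {
        size_t end = start;
        while (end < connections.size() &&
               connections[end].first == connections[start].first) {
          ++end;
        }
        // [start, end) is one group; the real code skips 1-element groups.
        printf("group id=%d size=%d\n",
               connections[start].first, static_cast<int>(end - start));
        start = end;
      }
      return 0;
    }
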
diff --git a/src/global-handles.h b/src/global-handles.h
index 90707b0..81e1476 100644
--- a/src/global-handles.h
+++ b/src/global-handles.h
@@ -28,6 +28,7 @@
#ifndef V8_GLOBAL_HANDLES_H_
#define V8_GLOBAL_HANDLES_H_
+#include "../include/v8.h"
#include "../include/v8-profiler.h"
#include "list.h"
@@ -46,70 +47,76 @@
// At GC the destroyed global handles are removed from the free list
// and deallocated.
+// Data structures for tracking object groups and implicit references.
+
// An object group is treated like a single JS object: if one object in
// the group is alive, all objects in the same group are considered alive.
// An object group is used to simulate object relationships in a DOM tree.
-class ObjectGroup {
- public:
- static ObjectGroup* New(Object*** handles,
- size_t length,
- v8::RetainedObjectInfo* info) {
+
+// An implicit references group consists of two parts: a parent object and a
+// list of child objects. If the parent is alive, all the children are alive
+// too.
+
+struct ObjectGroup {
+ explicit ObjectGroup(size_t length)
+ : info(NULL), length(length) {
ASSERT(length > 0);
- ObjectGroup* group = reinterpret_cast<ObjectGroup*>(
- malloc(OFFSET_OF(ObjectGroup, objects_[length])));
- group->length_ = length;
- group->info_ = info;
- CopyWords(group->objects_, handles, static_cast<int>(length));
- return group;
+ objects = new Object**[length];
}
-
- void Dispose() {
- if (info_ != NULL) info_->Dispose();
- free(this);
- }
-
- size_t length_;
- v8::RetainedObjectInfo* info_;
- Object** objects_[1]; // Variable sized array.
-
- private:
- void* operator new(size_t size);
- void operator delete(void* p);
~ObjectGroup();
- DISALLOW_IMPLICIT_CONSTRUCTORS(ObjectGroup);
+
+ v8::RetainedObjectInfo* info;
+ Object*** objects;
+ size_t length;
};
-// An implicit references group consists of two parts: a parent object and
-// a list of children objects. If the parent is alive, all the children
-// are alive too.
-class ImplicitRefGroup {
- public:
- static ImplicitRefGroup* New(HeapObject** parent,
- Object*** children,
- size_t length) {
+struct ImplicitRefGroup {
+ ImplicitRefGroup(HeapObject** parent, size_t length)
+ : parent(parent), length(length) {
ASSERT(length > 0);
- ImplicitRefGroup* group = reinterpret_cast<ImplicitRefGroup*>(
- malloc(OFFSET_OF(ImplicitRefGroup, children_[length])));
- group->parent_ = parent;
- group->length_ = length;
- CopyWords(group->children_, children, length);
- return group;
+ children = new Object**[length];
}
-
- void Dispose() {
- free(this);
- }
-
- HeapObject** parent_;
- size_t length_;
- Object** children_[1]; // Variable sized array.
-
- private:
- void* operator new(size_t size);
- void operator delete(void* p);
~ImplicitRefGroup();
- DISALLOW_IMPLICIT_CONSTRUCTORS(ImplicitRefGroup);
+
+ HeapObject** parent;
+ Object*** children;
+ size_t length;
+};
+
+
+// For internal bookkeeping.
+struct ObjectGroupConnection {
+ ObjectGroupConnection(UniqueId id, Object** object)
+ : id(id), object(object) {}
+
+ bool operator==(const ObjectGroupConnection& other) const {
+ return id == other.id;
+ }
+
+ bool operator<(const ObjectGroupConnection& other) const {
+ return id < other.id;
+ }
+
+ UniqueId id;
+ Object** object;
+};
+
+
+struct ObjectGroupRetainerInfo {
+ ObjectGroupRetainerInfo(UniqueId id, RetainedObjectInfo* info)
+ : id(id), info(info) {}
+
+ bool operator==(const ObjectGroupRetainerInfo& other) const {
+ return id == other.id;
+ }
+
+ bool operator<(const ObjectGroupRetainerInfo& other) const {
+ return id < other.id;
+ }
+
+ UniqueId id;
+ RetainedObjectInfo* info;
};
@@ -218,6 +225,16 @@
size_t length,
v8::RetainedObjectInfo* info);
+ // Associates a handle with the object group represented by id.
+ // Should only be used in a GC callback function before a collection.
+ // All groups are destroyed after a garbage collection.
+ void SetObjectGroupId(Object** handle, UniqueId id);
+
+ // Set RetainedObjectInfo for an object group. Should not be called more than
+ // once for a group. Should not be called for a group which contains no
+ // handles.
+ void SetRetainedObjectInfo(UniqueId id, RetainedObjectInfo* info);
+
// Add an implicit reference group.
// Should only be used in a GC callback function before a collection.
// All groups are destroyed after a mark-compact collection.
@@ -225,11 +242,23 @@
Object*** children,
size_t length);
- // Returns the object groups.
- List<ObjectGroup*>* object_groups() { return &object_groups_; }
+ // Adds an implicit reference from a group to an object. Should only be
+ // used in a GC callback function before a collection. All implicit
+ // references are destroyed after a mark-compact collection.
+ void SetReferenceFromGroup(UniqueId id, Object** child);
- // Returns the implicit references' groups.
+ // Adds an implicit reference from a parent object to a child object. Should
+ // only be used in a GC callback function before a collection. All implicit
+ // references are destroyed after a mark-compact collection.
+ void SetReference(HeapObject** parent, Object** child);
+
+ List<ObjectGroup*>* object_groups() {
+ ComputeObjectGroupsAndImplicitReferences();
+ return &object_groups_;
+ }
+
List<ImplicitRefGroup*>* implicit_ref_groups() {
+ ComputeObjectGroupsAndImplicitReferences();
return &implicit_ref_groups_;
}
@@ -250,6 +279,15 @@
private:
explicit GlobalHandles(Isolate* isolate);
+ // Migrates data from the internal representation (object_group_connections_,
+ // retainer_infos_ and implicit_ref_connections_) to the public and more
+ // efficient representation (object_groups_ and implicit_ref_groups_).
+ void ComputeObjectGroupsAndImplicitReferences();
+
+ // v8::internal::List is inefficient even for a small number of elements if
+ // we don't assign an initial capacity.
+ static const int kObjectGroupConnectionsCapacity = 20;
+
// Internal node structures.
class Node;
class NodeBlock;
@@ -275,9 +313,17 @@
int post_gc_processing_count_;
+ // Object groups and implicit references, public and more efficient
+ // representation.
List<ObjectGroup*> object_groups_;
List<ImplicitRefGroup*> implicit_ref_groups_;
+ // Object groups and implicit references, temporary representation while
+ // constructing the groups.
+ List<ObjectGroupConnection> object_group_connections_;
+ List<ObjectGroupRetainerInfo> retainer_infos_;
+ List<ObjectGroupConnection> implicit_ref_connections_;
+
friend class Isolate;
DISALLOW_COPY_AND_ASSIGN(GlobalHandles);
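
Compared to AddObjectGroup's array-of-handles form, the new entry points register one connection at a time and defer group construction until the groups are actually consumed. A hedged sketch of engine-internal use (handle_a, handle_b and child are Object** locations of live global handles, info is a caller-allocated RetainedObjectInfo; all names hypothetical):

    UniqueId group_id(42);  // any id that is unique per group and GC cycle
    global_handles->SetObjectGroupId(handle_a, group_id);
    global_handles->SetObjectGroupId(handle_b, group_id);
    global_handles->SetRetainedObjectInfo(group_id, info);  // optional
    global_handles->SetReferenceFromGroup(group_id, child);
    // The connections are folded into ObjectGroup/ImplicitRefGroup lists
    // lazily, on the next object_groups()/implicit_ref_groups() call.
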
diff --git a/src/handles-inl.h b/src/handles-inl.h
index f12a811..5a3e9ed 100644
--- a/src/handles-inl.h
+++ b/src/handles-inl.h
@@ -91,6 +91,10 @@
handle < roots_array_start + Heap::kStrongRootListLength) {
return true;
}
+ if (isolate->optimizing_compiler_thread()->IsOptimizerThread() &&
+ !Heap::RelocationLock::IsLockedByOptimizerThread(isolate->heap())) {
+ return false;
+ }
switch (isolate->HandleDereferenceGuardState()) {
case HandleDereferenceGuard::ALLOW:
return true;
diff --git a/src/handles.cc b/src/handles.cc
index 059ff24..5a5773e 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -565,7 +565,7 @@
LOG(isolate, ApiObjectAccess("interceptor-named-enum", *object));
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = enum_fun(info);
}
}
@@ -590,7 +590,7 @@
LOG(isolate, ApiObjectAccess("interceptor-indexed-enum", *object));
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = enum_fun(info);
#if ENABLE_EXTRA_CHECKS
CHECK(result.IsEmpty() || v8::Utils::OpenHandle(*result)->IsJSObject());
diff --git a/src/heap-inl.h b/src/heap-inl.h
index ab1fdb4..f937426 100644
--- a/src/heap-inl.h
+++ b/src/heap-inl.h
@@ -211,6 +211,7 @@
MaybeObject* Heap::AllocateRaw(int size_in_bytes,
AllocationSpace space,
AllocationSpace retry_space) {
+ SLOW_ASSERT(!isolate_->optimizing_compiler_thread()->IsOptimizerThread());
ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
ASSERT(space != NEW_SPACE ||
retry_space == OLD_POINTER_SPACE ||
diff --git a/src/heap-profiler.cc b/src/heap-profiler.cc
index 5c1badf..4f6fdb1 100644
--- a/src/heap-profiler.cc
+++ b/src/heap-profiler.cc
@@ -140,5 +140,10 @@
snapshots_->ObjectMoveEvent(from, to);
}
+void HeapProfiler::SetRetainedObjectInfo(UniqueId id,
+ RetainedObjectInfo* info) {
+ // TODO(yurys, marja): Don't route this information through GlobalHandles.
+ heap()->isolate()->global_handles()->SetRetainedObjectInfo(id, info);
+}
} } // namespace v8::internal
diff --git a/src/heap-profiler.h b/src/heap-profiler.h
index 3f3138d..1ed73b9 100644
--- a/src/heap-profiler.h
+++ b/src/heap-profiler.h
@@ -80,6 +80,8 @@
return snapshots_->is_tracking_objects();
}
+ void SetRetainedObjectInfo(UniqueId id, RetainedObjectInfo* info);
+
private:
Heap* heap() const { return snapshots_->heap(); }
diff --git a/src/heap-snapshot-generator.cc b/src/heap-snapshot-generator.cc
index 855a1d7..3d890f7 100644
--- a/src/heap-snapshot-generator.cc
+++ b/src/heap-snapshot-generator.cc
@@ -1936,18 +1936,19 @@
Isolate* isolate = Isolate::Current();
const GCType major_gc_type = kGCTypeMarkSweepCompact;
// Record objects that are joined into ObjectGroups.
- isolate->heap()->CallGCPrologueCallbacks(major_gc_type);
+ isolate->heap()->CallGCPrologueCallbacks(
+ major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
for (int i = 0; i < groups->length(); ++i) {
ObjectGroup* group = groups->at(i);
- if (group->info_ == NULL) continue;
- List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
- for (size_t j = 0; j < group->length_; ++j) {
- HeapObject* obj = HeapObject::cast(*group->objects_[j]);
+ if (group->info == NULL) continue;
+ List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
+ for (size_t j = 0; j < group->length; ++j) {
+ HeapObject* obj = HeapObject::cast(*group->objects[j]);
list->Add(obj);
in_groups_.Insert(obj);
}
- group->info_ = NULL; // Acquire info object ownership.
+ group->info = NULL; // Acquire info object ownership.
}
isolate->global_handles()->RemoveObjectGroups();
isolate->heap()->CallGCEpilogueCallbacks(major_gc_type);
@@ -1963,12 +1964,12 @@
isolate->global_handles()->implicit_ref_groups();
for (int i = 0; i < groups->length(); ++i) {
ImplicitRefGroup* group = groups->at(i);
- HeapObject* parent = *group->parent_;
+ HeapObject* parent = *group->parent;
int parent_entry =
filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
ASSERT(parent_entry != HeapEntry::kNoEntry);
- Object*** children = group->children_;
- for (size_t j = 0; j < group->length_; ++j) {
+ Object*** children = group->children;
+ for (size_t j = 0; j < group->length; ++j) {
Object* child = *children[j];
HeapEntry* child_entry =
filler_->FindOrAddEntry(child, native_entries_allocator_);
diff --git a/src/heap.cc b/src/heap.cc
index 82d586c..fb2f9d9 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -163,7 +163,8 @@
#endif
promotion_queue_(this),
configured_(false),
- chunks_queued_for_free_(NULL) {
+ chunks_queued_for_free_(NULL),
+ relocation_mutex_(NULL) {
// Allow build-time customization of the max semispace size. Building
// V8 with snapshots and a non-default max semispace size is much
// easier if you can define it as part of the build environment.
@@ -606,7 +607,7 @@
const char* gc_reason,
const char* collector_reason) {
// The VM is in the GC state until exiting this function.
- VMState state(isolate_, GC);
+ VMState<GC> state(isolate_);
#ifdef DEBUG
// Reset the allocation timeout to the GC interval, but make sure to
@@ -892,8 +893,8 @@
{
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
- VMState state(isolate_, EXTERNAL);
- CallGCPrologueCallbacks(gc_type);
+ VMState<EXTERNAL> state(isolate_);
+ CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags);
}
EnsureFromSpaceIsCommitted();
@@ -1014,7 +1015,7 @@
{
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
- VMState state(isolate_, EXTERNAL);
+ VMState<EXTERNAL> state(isolate_);
CallGCEpilogueCallbacks(gc_type);
}
@@ -1028,13 +1029,13 @@
}
-void Heap::CallGCPrologueCallbacks(GCType gc_type) {
+void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
if (gc_type == kGCTypeMarkSweepCompact && global_gc_prologue_callback_) {
global_gc_prologue_callback_();
}
for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
if (gc_type & gc_prologue_callbacks_[i].gc_type) {
- gc_prologue_callbacks_[i].callback(gc_type, kNoGCCallbackFlags);
+ gc_prologue_callbacks_[i].callback(gc_type, flags);
}
}
}
@@ -1300,6 +1301,8 @@
void Heap::Scavenge() {
+ RelocationLock relocation_lock(this);
+
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
#endif
@@ -3421,14 +3424,14 @@
return Failure::OutOfMemoryException(0x4);
}
- bool is_ascii_data_in_two_byte_string = false;
+ bool is_one_byte_data_in_two_byte_string = false;
if (!is_one_byte) {
// At least one of the strings uses two-byte representation so we
// can't use the fast case code for short ASCII strings below, but
// we can try to save memory if all chars actually fit in ASCII.
- is_ascii_data_in_two_byte_string =
- first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars();
- if (is_ascii_data_in_two_byte_string) {
+ is_one_byte_data_in_two_byte_string =
+ first->HasOnlyOneByteChars() && second->HasOnlyOneByteChars();
+ if (is_one_byte_data_in_two_byte_string) {
isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
}
}
@@ -3463,7 +3466,7 @@
for (int i = 0; i < second_length; i++) *dest++ = src[i];
return result;
} else {
- if (is_ascii_data_in_two_byte_string) {
+ if (is_one_byte_data_in_two_byte_string) {
Object* result;
{ MaybeObject* maybe_result = AllocateRawOneByteString(length);
if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -3488,7 +3491,7 @@
}
}
- Map* map = (is_one_byte || is_ascii_data_in_two_byte_string) ?
+ Map* map = (is_one_byte || is_one_byte_data_in_two_byte_string) ?
cons_ascii_string_map() : cons_string_map();
Object* result;
@@ -3634,11 +3637,11 @@
// For small strings we check whether the resource contains only
// one-byte characters. If so, we use a different string map.
- static const size_t kAsciiCheckLengthLimit = 32;
- bool is_one_byte = length <= kAsciiCheckLengthLimit &&
+ static const size_t kOneByteCheckLengthLimit = 32;
+ bool is_one_byte = length <= kOneByteCheckLengthLimit &&
String::IsOneByte(resource->data(), static_cast<int>(length));
Map* map = is_one_byte ?
- external_string_with_ascii_data_map() : external_string_map();
+ external_string_with_one_byte_data_map() : external_string_map();
Object* result;
{ MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -4974,14 +4977,14 @@
case EXTERNAL_STRING_TYPE: return external_internalized_string_map();
case EXTERNAL_ASCII_STRING_TYPE:
return external_ascii_internalized_string_map();
- case EXTERNAL_STRING_WITH_ASCII_DATA_TYPE:
- return external_internalized_string_with_ascii_data_map();
+ case EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
+ return external_internalized_string_with_one_byte_data_map();
case SHORT_EXTERNAL_STRING_TYPE:
return short_external_internalized_string_map();
case SHORT_EXTERNAL_ASCII_STRING_TYPE:
return short_external_ascii_internalized_string_map();
- case SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE:
- return short_external_internalized_string_with_ascii_data_map();
+ case SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
+ return short_external_internalized_string_with_one_byte_data_map();
default: return NULL; // No match found.
}
}
@@ -6635,6 +6638,11 @@
store_buffer()->SetUp();
+ if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex();
+#ifdef DEBUG
+ relocation_mutex_locked_by_optimizer_thread_ = false;
+#endif // DEBUG
+
return true;
}
@@ -6737,6 +6745,8 @@
incremental_marking()->TearDown();
isolate_->memory_allocator()->TearDown();
+
+ delete relocation_mutex_;
}
@@ -7696,7 +7706,8 @@
if (!getter_obj->IsJSFunction()) continue;
getter_fun = JSFunction::cast(getter_obj);
String* key = isolate->heap()->hidden_stack_trace_string();
- if (key != getter_fun->GetHiddenProperty(key)) continue;
+ Object* value = getter_fun->GetHiddenProperty(key);
+ if (key != value) continue;
}
budget--;
@@ -7866,4 +7877,15 @@
ClearObjectStats();
}
+
+Heap::RelocationLock::RelocationLock(Heap* heap) : heap_(heap) {
+ if (FLAG_parallel_recompilation) {
+ heap_->relocation_mutex_->Lock();
+#ifdef DEBUG
+ heap_->relocation_mutex_locked_by_optimizer_thread_ =
+ heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread();
+#endif // DEBUG
+ }
+}
+
} } // namespace v8::internal
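
Since CallGCPrologueCallbacks now forwards real GCCallbackFlags instead of hard-coding kNoGCCallbackFlags, an embedder callback can tell when the heap snapshot generator is asking for retained-object infos. A hedged sketch against the public v8.h callback API of this era (hypothetical embedder code):

    static void OnGCPrologue(GCType type, GCCallbackFlags flags) {
      if (flags & kGCCallbackFlagConstructRetainedObjectInfos) {
        // A heap snapshot is being taken: build RetainedObjectInfo and
        // object groups now; skip that work on ordinary collections.
      }
    }

    void InstallCallbacks() {
      V8::AddGCPrologueCallback(OnGCPrologue, kGCTypeMarkSweepCompact);
    }
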
diff --git a/src/heap.h b/src/heap.h
index 3b8a9ea..7722079 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -95,12 +95,14 @@
V(Map, sliced_string_map, SlicedStringMap) \
V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \
V(Map, external_string_map, ExternalStringMap) \
- V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap) \
+ V(Map, \
+ external_string_with_one_byte_data_map, \
+ ExternalStringWithOneByteDataMap) \
V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
V(Map, short_external_string_map, ShortExternalStringMap) \
V(Map, \
- short_external_string_with_ascii_data_map, \
- ShortExternalStringWithAsciiDataMap) \
+ short_external_string_with_one_byte_data_map, \
+ ShortExternalStringWithOneByteDataMap) \
V(Map, internalized_string_map, InternalizedStringMap) \
V(Map, ascii_internalized_string_map, AsciiInternalizedStringMap) \
V(Map, cons_internalized_string_map, ConsInternalizedStringMap) \
@@ -109,8 +111,8 @@
external_internalized_string_map, \
ExternalInternalizedStringMap) \
V(Map, \
- external_internalized_string_with_ascii_data_map, \
- ExternalInternalizedStringWithAsciiDataMap) \
+ external_internalized_string_with_one_byte_data_map, \
+ ExternalInternalizedStringWithOneByteDataMap) \
V(Map, \
external_ascii_internalized_string_map, \
ExternalAsciiInternalizedStringMap) \
@@ -118,8 +120,8 @@
short_external_internalized_string_map, \
ShortExternalInternalizedStringMap) \
V(Map, \
- short_external_internalized_string_with_ascii_data_map, \
- ShortExternalInternalizedStringWithAsciiDataMap) \
+ short_external_internalized_string_with_one_byte_data_map, \
+ ShortExternalInternalizedStringWithOneByteDataMap) \
V(Map, \
short_external_ascii_internalized_string_map, \
ShortExternalAsciiInternalizedStringMap) \
@@ -240,6 +242,8 @@
V(elements_field_string, "%elements") \
V(length_field_string, "%length") \
V(function_class_string, "Function") \
+ V(properties_field_symbol, "%properties") \
+ V(payload_field_symbol, "%payload") \
V(illegal_argument_string, "illegal argument") \
V(MakeReferenceError_string, "MakeReferenceError") \
V(MakeSyntaxError_string, "MakeSyntaxError") \
@@ -693,6 +697,12 @@
// Please note this does not perform a garbage collection.
MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);
+ // Allocates a JS ArrayBuffer object.
+ // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
+ // failed.
+ // Please note this does not perform a garbage collection.
+ MUST_USE_RESULT MaybeObject* AllocateJSArrayBuffer();
+
// Allocates a Harmony proxy or function proxy.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
@@ -1543,7 +1553,8 @@
8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
intptr_t OldGenPromotionLimit(intptr_t old_gen_size) {
- const int divisor = FLAG_stress_compaction ? 10 : 3;
+ const int divisor = FLAG_stress_compaction ? 10 :
+ new_space_high_promotion_mode_active_ ? 1 : 3;
intptr_t limit =
Max(old_gen_size + old_gen_size / divisor, kMinimumPromotionLimit);
limit += new_space_.Capacity();
@@ -1553,7 +1564,8 @@
}
intptr_t OldGenAllocationLimit(intptr_t old_gen_size) {
- const int divisor = FLAG_stress_compaction ? 8 : 2;
+ const int divisor = FLAG_stress_compaction ? 8 :
+ new_space_high_promotion_mode_active_ ? 1 : 2;
intptr_t limit =
Max(old_gen_size + old_gen_size / divisor, kMinimumAllocationLimit);
limit += new_space_.Capacity();
@@ -1753,7 +1765,7 @@
inline Isolate* isolate();
- void CallGCPrologueCallbacks(GCType gc_type);
+ void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
void CallGCEpilogueCallbacks(GCType gc_type);
inline bool OldGenerationAllocationLimitReached();
@@ -1848,6 +1860,31 @@
void CheckpointObjectStats();
+ // We don't use a ScopedLock here since we want to lock the heap
+ // only when FLAG_parallel_recompilation is true.
+ class RelocationLock {
+ public:
+ explicit RelocationLock(Heap* heap);
+
+ ~RelocationLock() {
+ if (FLAG_parallel_recompilation) {
+#ifdef DEBUG
+ heap_->relocation_mutex_locked_by_optimizer_thread_ = false;
+#endif // DEBUG
+ heap_->relocation_mutex_->Unlock();
+ }
+ }
+
+#ifdef DEBUG
+ static bool IsLockedByOptimizerThread(Heap* heap) {
+ return heap->relocation_mutex_locked_by_optimizer_thread_;
+ }
+#endif // DEBUG
+
+ private:
+ Heap* heap_;
+ };
+
private:
Heap();
@@ -2322,6 +2359,11 @@
MemoryChunk* chunks_queued_for_free_;
+ Mutex* relocation_mutex_;
+#ifdef DEBUG
+ bool relocation_mutex_locked_by_optimizer_thread_;
+#endif // DEBUG
+
friend class Factory;
friend class GCTracer;
friend class DisallowAllocationFailure;
diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc
index 6916209..5f0cd9d 100644
--- a/src/hydrogen-instructions.cc
+++ b/src/hydrogen-instructions.cc
@@ -1310,20 +1310,18 @@
switch (op()) {
case kMathFloor: return "floor";
case kMathRound: return "round";
- case kMathCeil: return "ceil";
case kMathAbs: return "abs";
case kMathLog: return "log";
case kMathSin: return "sin";
case kMathCos: return "cos";
case kMathTan: return "tan";
- case kMathASin: return "asin";
- case kMathACos: return "acos";
- case kMathATan: return "atan";
case kMathExp: return "exp";
case kMathSqrt: return "sqrt";
- default: break;
+ case kMathPowHalf: return "pow-half";
+ default:
+ UNREACHABLE();
+ return NULL;
}
- return "(unknown operation)";
}
@@ -1453,7 +1451,7 @@
HValue* HMul::Canonicalize() {
if (IsIdentityOperation(left(), right(), 1)) return left();
if (IsIdentityOperation(right(), left(), 1)) return right();
- return HArithmeticBinaryOperation::Canonicalize();
+ return this;
}
@@ -2248,12 +2246,10 @@
Representation left_rep = left()->representation();
Representation right_rep = right()->representation();
- if (left_rep.is_more_general_than(rep) &&
- left()->CheckFlag(kFlexibleRepresentation)) {
+ if (left_rep.is_more_general_than(rep) && !left_rep.IsTagged()) {
rep = left_rep;
}
- if (right_rep.is_more_general_than(rep) &&
- right()->CheckFlag(kFlexibleRepresentation)) {
+ if (right_rep.is_more_general_than(rep) && !right_rep.IsTagged()) {
rep = right_rep;
}
// Consider observed output representation, but ignore it if it's Double,
@@ -2268,7 +2264,8 @@
void HBinaryOperation::AssumeRepresentation(Representation r) {
- set_observed_input_representation(r, r);
+ set_observed_input_representation(1, r);
+ set_observed_input_representation(2, r);
HValue::AssumeRepresentation(r);
}
@@ -3466,6 +3463,42 @@
}
+void HPhi::SimplifyConstantInputs() {
+ // Convert constant inputs to integers when all uses are truncating.
+ // This must happen before representation inference takes place.
+ if (!CheckUsesForFlag(kTruncatingToInt32)) return;
+ for (int i = 0; i < OperandCount(); ++i) {
+ if (!OperandAt(i)->IsConstant()) return;
+ }
+ HGraph* graph = block()->graph();
+ for (int i = 0; i < OperandCount(); ++i) {
+ HConstant* operand = HConstant::cast(OperandAt(i));
+ if (operand->HasInteger32Value()) {
+ continue;
+ } else if (operand->HasDoubleValue()) {
+ HConstant* integer_input =
+ new(graph->zone()) HConstant(DoubleToInt32(operand->DoubleValue()),
+ Representation::Integer32());
+ integer_input->InsertAfter(operand);
+ SetOperandAt(i, integer_input);
+ } else if (operand == graph->GetConstantTrue()) {
+ SetOperandAt(i, graph->GetConstant1());
+ } else {
+ // This catches |false|, |undefined|, strings and objects.
+ SetOperandAt(i, graph->GetConstant0());
+ }
+ }
+ // Overwrite observed input representations because they are likely Tagged.
+ for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
+ HValue* use = it.value();
+ if (use->IsBinaryOperation()) {
+ HBinaryOperation::cast(use)->set_observed_input_representation(
+ it.index(), Representation::Integer32());
+ }
+ }
+}
+
+
void HPhi::InferRepresentation(HInferRepresentation* h_infer) {
ASSERT(CheckFlag(kFlexibleRepresentation));
// If there are non-Phi uses, and all of them have observed the same
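
SimplifyConstantInputs leans on DoubleToInt32's ECMAScript ToInt32 semantics when it rewrites double constants for truncating uses: truncate toward zero, then wrap modulo 2^32 into the signed 32-bit range. A standalone sketch of that truncation (not V8's actual implementation):

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    int32_t ToInt32(double x) {
      if (std::isnan(x) || std::isinf(x)) return 0;
      double t = std::trunc(x);                // truncate toward zero
      double m = std::fmod(t, 4294967296.0);   // wrap modulo 2^32
      if (m < 0) m += 4294967296.0;            // shift into [0, 2^32)
      return static_cast<int32_t>(static_cast<uint32_t>(m));
    }

    int main() {
      printf("%d\n", static_cast<int>(ToInt32(-3.9)));          // -3
      printf("%d\n", static_cast<int>(ToInt32(2147483648.0)));  // -2147483648
      return 0;
    }
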
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index 5ac0ff1..aa89f71 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -2603,24 +2603,26 @@
switch (op) {
case kMathFloor:
case kMathRound:
- case kMathCeil:
set_representation(Representation::Integer32());
break;
case kMathAbs:
// Not setting representation here: it is None intentionally.
SetFlag(kFlexibleRepresentation);
+ // TODO(svenpanne) This flag is actually only needed if representation()
+ // is tagged, and not when it is an unboxed double or unboxed integer.
SetGVNFlag(kChangesNewSpacePromotion);
break;
- case kMathSqrt:
- case kMathPowHalf:
case kMathLog:
case kMathSin:
case kMathCos:
case kMathTan:
set_representation(Representation::Double());
+ // These operations use the TranscendentalCache, so they may allocate.
SetGVNFlag(kChangesNewSpacePromotion);
break;
case kMathExp:
+ case kMathSqrt:
+ case kMathPowHalf:
set_representation(Representation::Double());
break;
default:
@@ -3141,6 +3143,8 @@
return true;
}
+ void SimplifyConstantInputs();
+
protected:
virtual void DeleteFromGraph();
virtual void InternalSetOperandAt(int index, HValue* value) {
@@ -3442,10 +3446,9 @@
return right();
}
- void set_observed_input_representation(Representation left,
- Representation right) {
- observed_input_representation_[0] = left;
- observed_input_representation_[1] = right;
+ void set_observed_input_representation(int index, Representation rep) {
+ ASSERT(index >= 1 && index <= 2);
+ observed_input_representation_[index - 1] = rep;
}
virtual void initialize_output_representation(Representation observed) {
@@ -3949,6 +3952,10 @@
return Representation::Tagged();
}
+ virtual Representation observed_input_representation(int index) {
+ return Representation::Tagged();
+ }
+
DECLARE_CONCRETE_INSTRUCTION(CompareObjectEqAndBranch)
};
@@ -4415,6 +4422,17 @@
HValue* left,
HValue* right);
+ static HInstruction* NewImul(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right) {
+ HMul* mul = new(zone) HMul(context, left, right);
+ // TODO(mstarzinger): Prevent bailout on minus zero for imul.
+ mul->AssumeRepresentation(Representation::Integer32());
+ mul->ClearFlag(HValue::kCanOverflow);
+ return mul;
+ }
+
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
virtual HValue* Canonicalize();
@@ -4948,6 +4966,19 @@
SetGVNFlag(kChangesNewSpacePromotion);
}
+ static Flags DefaultFlags() {
+ return CAN_ALLOCATE_IN_NEW_SPACE;
+ }
+
+ static Flags DefaultFlags(ElementsKind kind) {
+ Flags flags = CAN_ALLOCATE_IN_NEW_SPACE;
+ if (IsFastDoubleElementsKind(kind)) {
+ flags = static_cast<HAllocate::Flags>(
+ flags | HAllocate::ALLOCATE_DOUBLE_ALIGNED);
+ }
+ return flags;
+ }
+
HValue* context() { return OperandAt(0); }
HValue* size() { return OperandAt(1); }
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 20e1d0d..a978834 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -25,9 +25,11 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include "v8.h"
#include "hydrogen.h"
+#include <algorithm>
+
+#include "v8.h"
#include "codegen.h"
#include "full-codegen.h"
#include "hashmap.h"
@@ -509,6 +511,7 @@
void HGraph::Verify(bool do_full_verify) const {
+ Heap::RelocationLock relocation_lock(isolate()->heap());
ALLOW_HANDLE_DEREF(isolate(), "debug mode verification");
for (int i = 0; i < blocks_.length(); i++) {
HBasicBlock* block = blocks_.at(i);
@@ -601,6 +604,19 @@
}
+HConstant* HGraph::GetConstantSmi(SetOncePointer<HConstant>* pointer,
+ int32_t value) {
+ if (!pointer->is_set()) {
+ HConstant* constant =
+ new(zone()) HConstant(Handle<Object>(Smi::FromInt(value), isolate()),
+ Representation::Tagged());
+ constant->InsertAfter(GetConstantUndefined());
+ pointer->set(constant);
+ }
+ return pointer->get();
+}
+
+
HConstant* HGraph::GetConstant0() {
return GetConstantInt32(&constant_0_, 0);
}
@@ -636,6 +652,18 @@
DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
DEFINE_GET_CONSTANT(False, false, HType::Boolean(), false)
DEFINE_GET_CONSTANT(Hole, the_hole, HType::Tagged(), false)
+DEFINE_GET_CONSTANT(Null, null, HType::Tagged(), false)
+
+
+HConstant* HGraph::GetConstantSmi0() {
+ return GetConstantSmi(&constant_smi_0_, 0);
+}
+
+
+HConstant* HGraph::GetConstantSmi1() {
+ return GetConstantSmi(&constant_smi_1_, 1);
+}
+
#undef DEFINE_GET_CONSTANT
@@ -768,8 +796,9 @@
HBasicBlock* true_block = last_true_block_ == NULL
? first_true_block_
: last_true_block_;
- HBasicBlock* false_block =
- did_else_ ? builder_->current_block() : first_false_block_;
+ HBasicBlock* false_block = did_else_ && (first_false_block_ != NULL)
+ ? builder_->current_block()
+ : first_false_block_;
continuation->Capture(true_block, false_block, position_);
captured_ = true;
End();
@@ -802,7 +831,6 @@
void HGraphBuilder::IfBuilder::Deopt() {
- ASSERT(!(did_then_ ^ did_else_));
HBasicBlock* block = builder_->current_block();
block->FinishExitWithDeoptimization(HDeoptimize::kUseAll);
if (did_else_) {
@@ -813,6 +841,19 @@
}
+void HGraphBuilder::IfBuilder::Return(HValue* value) {
+ HBasicBlock* block = builder_->current_block();
+ block->Finish(new(zone()) HReturn(value,
+ builder_->environment()->LookupContext(),
+ builder_->graph()->GetConstantMinus1()));
+ if (did_else_) {
+ first_false_block_ = NULL;
+ } else {
+ first_true_block_ = NULL;
+ }
+}
+
+
void HGraphBuilder::IfBuilder::End() {
if (!captured_) {
ASSERT(did_then_);
@@ -1341,8 +1382,10 @@
total_size->ChangeRepresentation(Representation::Integer32());
total_size->ClearFlag(HValue::kCanOverflow);
- HAllocate::Flags flags = HAllocate::CAN_ALLOCATE_IN_NEW_SPACE;
+ HAllocate::Flags flags = HAllocate::DefaultFlags(kind);
if (FLAG_pretenure_literals) {
+ // TODO(hpayer): When pretenuring can be internalized, flags can become
+ // private to HAllocate.
if (IsFastDoubleElementsKind(kind)) {
flags = static_cast<HAllocate::Flags>(
flags | HAllocate::CAN_ALLOCATE_IN_OLD_DATA_SPACE);
@@ -1351,10 +1394,6 @@
flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
}
}
- if (IsFastDoubleElementsKind(kind)) {
- flags = static_cast<HAllocate::Flags>(
- flags | HAllocate::ALLOCATE_DOUBLE_ALIGNED);
- }
HValue* elements =
AddInstruction(new(zone) HAllocate(context, total_size,
@@ -1390,6 +1429,63 @@
}
+HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
+ HValue* array_map,
+ AllocationSiteMode mode,
+ HValue* allocation_site_payload,
+ HValue* length_field) {
+
+ BuildStoreMap(array, array_map);
+
+ HConstant* empty_fixed_array =
+ new(zone()) HConstant(
+ Handle<FixedArray>(isolate()->heap()->empty_fixed_array()),
+ Representation::Tagged());
+ AddInstruction(empty_fixed_array);
+
+ AddInstruction(new(zone()) HStoreNamedField(array,
+ isolate()->factory()->properties_field_symbol(),
+ empty_fixed_array,
+ true,
+ JSArray::kPropertiesOffset));
+
+ HInstruction* length_store = AddInstruction(
+ new(zone()) HStoreNamedField(array,
+ isolate()->factory()->length_field_string(),
+ length_field,
+ true,
+ JSArray::kLengthOffset));
+ length_store->SetGVNFlag(kChangesArrayLengths);
+
+ if (mode == TRACK_ALLOCATION_SITE) {
+ BuildCreateAllocationSiteInfo(array,
+ JSArray::kSize,
+ allocation_site_payload);
+ }
+
+ int elements_location = JSArray::kSize;
+ if (mode == TRACK_ALLOCATION_SITE) {
+ elements_location += AllocationSiteInfo::kSize;
+ }
+
+ HInnerAllocatedObject* elements = new(zone()) HInnerAllocatedObject(
+ array,
+ elements_location);
+ AddInstruction(elements);
+
+ HInstruction* elements_store = AddInstruction(
+ new(zone()) HStoreNamedField(
+ array,
+ isolate()->factory()->elements_field_string(),
+ elements,
+ true,
+ JSArray::kElementsOffset));
+ elements_store->SetGVNFlag(kChangesElementsPointer);
+
+ return elements;
+}
+
+
HInstruction* HGraphBuilder::BuildStoreMap(HValue* object,
HValue* map) {
Zone* zone = this->zone();
@@ -1503,13 +1599,38 @@
: AddInstruction(new(zone) HConstant(nan_double,
Representation::Double()));
- LoopBuilder builder(this, context, LoopBuilder::kPostIncrement);
+ // Special case: unfold the fill loop when the bounds are known small
+ // constants.
+ static const int kLoopUnfoldLimit = 4;
+ bool unfold_loop = false;
+ int initial_capacity = JSArray::kPreallocatedArrayElements;
+ if (from->IsConstant() && to->IsConstant() &&
+ initial_capacity <= kLoopUnfoldLimit) {
+ HConstant* constant_from = HConstant::cast(from);
+ HConstant* constant_to = HConstant::cast(to);
- HValue* key = builder.BeginBody(from, to, Token::LT);
+ if (constant_from->HasInteger32Value() &&
+ constant_from->Integer32Value() == 0 &&
+ constant_to->HasInteger32Value() &&
+ constant_to->Integer32Value() == initial_capacity) {
+ unfold_loop = true;
+ }
+ }
- AddInstruction(new(zone) HStoreKeyed(elements, key, hole, elements_kind));
+ if (unfold_loop) {
+ for (int i = 0; i < initial_capacity; i++) {
+ HInstruction* key = AddInstruction(new(zone)
+ HConstant(i, Representation::Integer32()));
+ AddInstruction(new(zone) HStoreKeyed(elements, key, hole, elements_kind));
+ }
+ } else {
+ LoopBuilder builder(this, context, LoopBuilder::kPostIncrement);
- builder.EndBody();
+ HValue* key = builder.BeginBody(from, to, Token::LT);
+
+ AddInstruction(new(zone) HStoreKeyed(elements, key, hole, elements_kind));
+
+ builder.EndBody();
+ }
}
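
The unfold test above fires only for the common shape of this fill: constant bounds covering exactly [0, JSArray::kPreallocatedArrayElements) with a small capacity. The same decision restated as stand-alone C++, with plain ints in place of the HConstant operands:

#include <cstdio>

static const int kLoopUnfoldLimit = 4;  // mirrors the value in the patch

bool ShouldUnfold(bool from_is_constant, int from,
                  bool to_is_constant, int to, int initial_capacity) {
  return from_is_constant && to_is_constant &&
         from == 0 && to == initial_capacity &&
         initial_capacity <= kLoopUnfoldLimit;
}

int main() {
  std::printf("%d\n", ShouldUnfold(true, 0, true, 4, 4));    // 1: unfold
  std::printf("%d\n", ShouldUnfold(true, 0, true, 16, 16));  // 0: keep loop
}
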
@@ -1576,12 +1697,7 @@
: FixedArray::SizeFor(length);
}
- HAllocate::Flags allocate_flags = HAllocate::CAN_ALLOCATE_IN_NEW_SPACE;
- if (IsFastDoubleElementsKind(kind)) {
- allocate_flags = static_cast<HAllocate::Flags>(
- allocate_flags | HAllocate::ALLOCATE_DOUBLE_ALIGNED);
- }
-
+ HAllocate::Flags allocate_flags = HAllocate::DefaultFlags(kind);
// Allocate both the JS array and the elements array in one big
// allocation. This avoids multiple limit checks.
HValue* size_in_bytes =
@@ -1610,15 +1726,7 @@
// Create an allocation site info if requested.
if (mode == TRACK_ALLOCATION_SITE) {
- HValue* alloc_site =
- AddInstruction(new(zone) HInnerAllocatedObject(object, JSArray::kSize));
- Handle<Map> alloc_site_map(isolate()->heap()->allocation_site_info_map());
- BuildStoreMap(alloc_site, alloc_site_map);
- int alloc_payload_offset = AllocationSiteInfo::kPayloadOffset;
- AddInstruction(new(zone) HStoreNamedField(alloc_site,
- factory->empty_string(),
- boilerplate,
- true, alloc_payload_offset));
+ BuildCreateAllocationSiteInfo(object, JSArray::kSize, boilerplate);
}
if (length > 0) {
@@ -1667,6 +1775,205 @@
}
+void HGraphBuilder::BuildCompareNil(
+ HValue* value,
+ EqualityKind kind,
+ CompareNilICStub::Types types,
+ Handle<Map> map,
+ int position,
+ HIfContinuation* continuation) {
+ IfBuilder if_nil(this, position);
+ bool needs_or = false;
+ if ((types & CompareNilICStub::kCompareAgainstNull) != 0) {
+ if (needs_or) if_nil.Or();
+ if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
+ needs_or = true;
+ }
+ if ((types & CompareNilICStub::kCompareAgainstUndefined) != 0) {
+ if (needs_or) if_nil.Or();
+ if_nil.If<HCompareObjectEqAndBranch>(value,
+ graph()->GetConstantUndefined());
+ needs_or = true;
+ }
+ // Handle either undetectable or monomorphic, not both.
+ ASSERT(((types & CompareNilICStub::kCompareAgainstUndetectable) == 0) ||
+ ((types & CompareNilICStub::kCompareAgainstMonomorphicMap) == 0));
+ if ((types & CompareNilICStub::kCompareAgainstUndetectable) != 0) {
+ if (needs_or) if_nil.Or();
+ if_nil.If<HIsUndetectableAndBranch>(value);
+ } else {
+ if_nil.Then();
+ if_nil.Else();
+ if ((types & CompareNilICStub::kCompareAgainstMonomorphicMap) != 0) {
+ BuildCheckNonSmi(value);
+ // For ICs, the map checked below is a sentinel map that gets replaced by
+ // the monomorphic map when the code is used as a template to generate a
+ // new IC. For optimized functions, there is no sentinel map, the map
+ // emitted below is the actual monomorphic map.
+ BuildCheckMap(value, map);
+ } else {
+ if (kind == kNonStrictEquality) {
+ if_nil.Deopt();
+ }
+ }
+ }
+
+ if_nil.CaptureContinuation(continuation);
+}
+
+
+HValue* HGraphBuilder::BuildCreateAllocationSiteInfo(HValue* previous_object,
+ int previous_object_size,
+ HValue* payload) {
+ HInnerAllocatedObject* alloc_site = new(zone())
+ HInnerAllocatedObject(previous_object, previous_object_size);
+ AddInstruction(alloc_site);
+ Handle<Map> alloc_site_map(isolate()->heap()->allocation_site_info_map());
+ BuildStoreMap(alloc_site, alloc_site_map);
+ AddInstruction(new(zone()) HStoreNamedField(alloc_site,
+ isolate()->factory()->payload_string(),
+ payload,
+ true,
+ AllocationSiteInfo::kPayloadOffset));
+ return alloc_site;
+}
+
+
+HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
+ ElementsKind kind,
+ HValue* allocation_site_payload,
+ AllocationSiteMode mode) :
+ builder_(builder),
+ kind_(kind),
+ allocation_site_payload_(allocation_site_payload) {
+ if (mode == DONT_TRACK_ALLOCATION_SITE) {
+ mode_ = mode;
+ } else {
+ mode_ = AllocationSiteInfo::GetMode(kind);
+ }
+}
+
+
+HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode(HValue* context) {
+ // Get the global object, then the native context, then the JSArray map array.
+ HInstruction* global_object = AddInstruction(new(zone())
+ HGlobalObject(context));
+ HInstruction* native_context = AddInstruction(new(zone())
+ HLoadNamedField(global_object, true, GlobalObject::kNativeContextOffset));
+ int offset = Context::kHeaderSize +
+ kPointerSize * Context::JS_ARRAY_MAPS_INDEX;
+ HInstruction* map_array = AddInstruction(new(zone())
+ HLoadNamedField(native_context, true, offset));
+ offset = kind_ * kPointerSize + FixedArrayBase::kHeaderSize;
+ return AddInstruction(new(zone()) HLoadNamedField(map_array, true, offset));
+}
+
+
+HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
+ HValue* length_node) {
+ HValue* context = builder()->environment()->LookupContext();
+ ASSERT(length_node != NULL);
+
+ int base_size = JSArray::kSize;
+ if (mode_ == TRACK_ALLOCATION_SITE) {
+ base_size += AllocationSiteInfo::kSize;
+ }
+
+ if (IsFastDoubleElementsKind(kind_)) {
+ base_size += FixedDoubleArray::kHeaderSize;
+ } else {
+ base_size += FixedArray::kHeaderSize;
+ }
+
+ HInstruction* elements_size_value = new(zone())
+ HConstant(elements_size(), Representation::Integer32());
+ AddInstruction(elements_size_value);
+ HInstruction* mul = HMul::New(zone(), context, length_node,
+ elements_size_value);
+ mul->ChangeRepresentation(Representation::Integer32());
+ mul->ClearFlag(HValue::kCanOverflow);
+ AddInstruction(mul);
+
+ HInstruction* base = new(zone()) HConstant(base_size,
+ Representation::Integer32());
+ AddInstruction(base);
+ HInstruction* total_size = HAdd::New(zone(), context, base, mul);
+ total_size->ChangeRepresentation(Representation::Integer32());
+ total_size->ClearFlag(HValue::kCanOverflow);
+ AddInstruction(total_size);
+ return total_size;
+}
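
EstablishAllocationSize is header bytes plus length times element width. The same arithmetic in plain C++; the header sizes passed in are hypothetical placeholders for JSArray::kSize, AllocationSiteInfo::kSize, and the fixed-array header:

#include <cstdio>

const int kPointerSize = 4;  // 32-bit target assumed
const int kDoubleSize = 8;

int AllocationSize(int length, bool double_kind, bool track_site,
                   int js_array_size, int site_info_size,
                   int elements_header_size) {
  int base = js_array_size + (track_site ? site_info_size : 0) +
             elements_header_size;
  return base + length * (double_kind ? kDoubleSize : kPointerSize);
}

int main() {
  // e.g. JSArray (16) + FixedArray header (8) + 4 tagged slots (16) = 40.
  std::printf("%d\n", AllocationSize(4, false, false, 16, 12, 8));
}
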
+
+
+HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() {
+ int base_size = JSArray::kSize;
+ if (mode_ == TRACK_ALLOCATION_SITE) {
+ base_size += AllocationSiteInfo::kSize;
+ }
+
+ base_size += IsFastDoubleElementsKind(kind_)
+ ? FixedDoubleArray::SizeFor(initial_capacity())
+ : FixedArray::SizeFor(initial_capacity());
+
+ HConstant* array_size =
+ new(zone()) HConstant(base_size, Representation::Integer32());
+ AddInstruction(array_size);
+ return array_size;
+}
+
+
+HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
+ HValue* size_in_bytes = EstablishEmptyArrayAllocationSize();
+ HConstant* capacity =
+ new(zone()) HConstant(initial_capacity(), Representation::Integer32());
+ AddInstruction(capacity);
+ return AllocateArray(size_in_bytes,
+ capacity,
+ builder()->graph()->GetConstant0(),
+ true);
+}
+
+
+HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
+ HValue* length_field,
+ bool fill_with_hole) {
+ HValue* size_in_bytes = EstablishAllocationSize(capacity);
+ return AllocateArray(size_in_bytes, capacity, length_field, fill_with_hole);
+}
+
+
+HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
+ HValue* capacity,
+ HValue* length_field,
+ bool fill_with_hole) {
+ HValue* context = builder()->environment()->LookupContext();
+
+ // Allocate (dealing with failure appropriately)
+ HAllocate::Flags flags = HAllocate::DefaultFlags(kind_);
+ HAllocate* new_object = new(zone()) HAllocate(context, size_in_bytes,
+ HType::JSArray(), flags);
+ AddInstruction(new_object);
+
+ // Fill in the fields: map, properties, length
+ HValue* map = EmitMapCode(context);
+ elements_location_ = builder()->BuildJSArrayHeader(new_object,
+ map,
+ mode_,
+ allocation_site_payload_,
+ length_field);
+
+ // Initialize the elements
+ builder()->BuildInitializeElements(elements_location_, kind_, capacity);
+
+ if (fill_with_hole) {
+ builder()->BuildFillElementsWithHole(context, elements_location_, kind_,
+ graph()->GetConstant0(), capacity);
+ }
+
+ return new_object;
+}
+
+
HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info,
TypeFeedbackOracle* oracle)
: HGraphBuilder(info),
@@ -3440,7 +3747,12 @@
}
}
- // (3a) Use the phi reachability information from step 2 to
+ // Simplify constant phi inputs where possible.
+ for (int i = 0; i < phi_count; ++i) {
+ phi_list->at(i)->SimplifyConstantInputs();
+ }
+
+ // Use the phi reachability information from step 2 to
// push information about values which can't be converted to integer
// without deoptimization through the phi use-def chains, avoiding
// unnecessary deoptimizations later.
@@ -3457,7 +3769,7 @@
}
}
- // (3b) Use the phi reachability information from step 2 to
+ // Use the phi reachability information from step 2 to
// sum up the non-phi use counts of all connected phis.
for (int i = 0; i < phi_count; ++i) {
HPhi* phi = phi_list->at(i);
@@ -8006,14 +8318,12 @@
};
-static int CompareHotness(void const* a, void const* b) {
- FunctionSorter const* function1 = reinterpret_cast<FunctionSorter const*>(a);
- FunctionSorter const* function2 = reinterpret_cast<FunctionSorter const*>(b);
- int diff = function1->ticks() - function2->ticks();
- if (diff != 0) return -diff;
- diff = function1->ast_length() - function2->ast_length();
- if (diff != 0) return diff;
- return function1->src_length() - function2->src_length();
+inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
+ int diff = lhs.ticks() - rhs.ticks();
+ if (diff != 0) return diff > 0;
+ diff = lhs.ast_length() - rhs.ast_length();
+ if (diff != 0) return diff < 0;
+ return lhs.src_length() < rhs.src_length();
}
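
This is the qsort-to-std::sort change called out in the release notes: the three-way CompareHotness comparator becomes a strict weak ordering, sorting the hottest functions first. A self-contained sketch with a stand-in struct (the Hotness type is hypothetical; only the ordering logic comes from the patch):

#include <algorithm>
#include <cstdio>

struct Hotness { int ticks, ast_length, src_length; };

// More ticks sorts earlier; ties break toward smaller AST, then source.
inline bool operator<(const Hotness& lhs, const Hotness& rhs) {
  if (lhs.ticks != rhs.ticks) return lhs.ticks > rhs.ticks;
  if (lhs.ast_length != rhs.ast_length) return lhs.ast_length < rhs.ast_length;
  return lhs.src_length < rhs.src_length;
}

int main() {
  Hotness order[] = {{1, 5, 9}, {3, 4, 1}, {3, 2, 7}};
  std::sort(order, order + 3);
  for (const Hotness& h : order)  // prints 3 2 7, then 3 4 1, then 1 5 9
    std::printf("%d %d %d\n", h.ticks, h.ast_length, h.src_length);
}
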
@@ -8056,10 +8366,7 @@
}
}
- qsort(reinterpret_cast<void*>(&order[0]),
- ordered_functions,
- sizeof(order[0]),
- &CompareHotness);
+ std::sort(order, order + ordered_functions);
HBasicBlock* number_block = NULL;
@@ -8636,6 +8943,18 @@
return true;
}
break;
+ case kMathImul:
+ if (expr->arguments()->length() == 2) {
+ HValue* right = Pop();
+ HValue* left = Pop();
+ Drop(1); // Receiver.
+ HValue* context = environment()->LookupContext();
+ HInstruction* op = HMul::NewImul(zone(), context, left, right);
+ if (drop_extra) Drop(1); // Optionally drop the function.
+ ast_context()->ReturnInstruction(op, expr->id());
+ return true;
+ }
+ break;
default:
// Not supported for inlining yet.
break;
@@ -8783,6 +9102,18 @@
return true;
}
break;
+ case kMathImul:
+ if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
+ AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
+ HValue* right = Pop();
+ HValue* left = Pop();
+ Drop(1); // Receiver.
+ HValue* context = environment()->LookupContext();
+ HInstruction* result = HMul::NewImul(zone(), context, left, right);
+ ast_context()->ReturnInstruction(result, expr->id());
+ return true;
+ }
+ break;
default:
// Not yet supported for inlining.
break;
@@ -9215,19 +9546,31 @@
} else {
// The constructor function is both an operand to the instruction and an
// argument to the construct call.
+ bool use_call_new_array = FLAG_optimize_constructed_arrays &&
+ !(expr->target().is_null()) &&
+ *(expr->target()) == isolate()->global_context()->array_function();
+
CHECK_ALIVE(VisitArgument(expr->expression()));
HValue* constructor = HPushArgument::cast(Top())->argument();
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
HCallNew* call;
- if (FLAG_optimize_constructed_arrays &&
- !(expr->target().is_null()) &&
- *(expr->target()) == isolate()->global_context()->array_function()) {
- Handle<Object> feedback = oracle()->GetInfo(expr->CallNewFeedbackId());
- ASSERT(feedback->IsSmi());
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(feedback);
+ if (use_call_new_array) {
AddInstruction(new(zone()) HCheckFunction(constructor,
Handle<JSFunction>(isolate()->global_context()->array_function())));
+ Handle<Object> feedback = oracle()->GetInfo(expr->CallNewFeedbackId());
+ ASSERT(feedback->IsSmi());
+
+ // TODO(mvstanton): It would be better to use the already created global
+ // property cell that is shared by full code gen. That way, any transition
+ // information that happened after crankshaft won't be lost. The right
+ // way to do that is to begin passing the cell to the type feedback oracle
+ // instead of just the value in the cell. Do this in a follow-up checkin.
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(feedback);
+
+ // TODO(mvstanton): Here we should probably insert code to check if the
+ // type cell elements kind is different from when we compiled, and deopt
+ // in that case. Do this in a follow-up checkin.
call = new(zone()) HCallNewArray(context, constructor, argument_count,
cell);
} else {
@@ -9372,7 +9715,8 @@
info = TypeInfo::Unknown();
}
if (instr->IsBinaryOperation()) {
- HBinaryOperation::cast(instr)->set_observed_input_representation(rep, rep);
+ HBinaryOperation::cast(instr)->set_observed_input_representation(1, rep);
+ HBinaryOperation::cast(instr)->set_observed_input_representation(2, rep);
}
return ast_context()->ReturnInstruction(instr, expr->id());
}
@@ -9813,7 +10157,8 @@
if (instr->IsBinaryOperation()) {
HBinaryOperation* binop = HBinaryOperation::cast(instr);
- binop->set_observed_input_representation(left_rep, right_rep);
+ binop->set_observed_input_representation(1, left_rep);
+ binop->set_observed_input_representation(2, right_rep);
binop->initialize_output_representation(result_rep);
}
return instr;
@@ -10193,7 +10538,8 @@
if (combined_rep.IsTagged() || combined_rep.IsNone()) {
HCompareGeneric* result =
new(zone()) HCompareGeneric(context, left, right, op);
- result->set_observed_input_representation(left_rep, right_rep);
+ result->set_observed_input_representation(1, left_rep);
+ result->set_observed_input_representation(2, right_rep);
result->set_position(expr->position());
return ast_context()->ReturnInstruction(result, expr->id());
} else {
@@ -10215,9 +10561,24 @@
ASSERT(current_block()->HasPredecessor());
EqualityKind kind =
expr->op() == Token::EQ_STRICT ? kStrictEquality : kNonStrictEquality;
- HIsNilAndBranch* instr = new(zone()) HIsNilAndBranch(value, kind, nil);
- instr->set_position(expr->position());
- return ast_context()->ReturnControl(instr, expr->id());
+ HIfContinuation continuation;
+ TypeFeedbackId id = expr->CompareOperationFeedbackId();
+ CompareNilICStub::Types types;
+ if (kind == kStrictEquality) {
+ if (nil == kNullValue) {
+ types = CompareNilICStub::kCompareAgainstNull;
+ } else {
+ types = CompareNilICStub::kCompareAgainstUndefined;
+ }
+ } else {
+ types = static_cast<CompareNilICStub::Types>(
+ oracle()->CompareNilTypes(id));
+ if (types == 0) types = CompareNilICStub::kFullCompare;
+ }
+ Handle<Map> map_handle(oracle()->CompareNilMonomorphicReceiverType(id));
+ BuildCompareNil(value, kind, types, map_handle,
+ expr->position(), &continuation);
+ return ast_context()->ReturnContinuation(&continuation, expr->id());
}
@@ -10340,15 +10701,7 @@
// Build Allocation Site Info if desired
if (create_allocation_site_info) {
- HValue* alloc_site =
- AddInstruction(new(zone) HInnerAllocatedObject(target, JSArray::kSize));
- Handle<Map> alloc_site_map(isolate()->heap()->allocation_site_info_map());
- BuildStoreMap(alloc_site, alloc_site_map);
- int alloc_payload_offset = AllocationSiteInfo::kPayloadOffset;
- AddInstruction(new(zone) HStoreNamedField(alloc_site,
- factory->payload_string(),
- original_boilerplate,
- true, alloc_payload_offset));
+ BuildCreateAllocationSiteInfo(target, JSArray::kSize, original_boilerplate);
}
if (object_elements != NULL) {
@@ -11140,6 +11493,17 @@
}
+// Support for generators.
+void HOptimizedGraphBuilder::GenerateGeneratorSend(CallRuntime* call) {
+ return Bailout("inlined runtime function: GeneratorSend");
+}
+
+
+void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
+ return Bailout("inlined runtime function: GeneratorThrow");
+}
+
+
#undef CHECK_BAILOUT
#undef CHECK_ALIVE
diff --git a/src/hydrogen.h b/src/hydrogen.h
index 9d3a780..ab721bd 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -36,6 +36,7 @@
#include "hydrogen-instructions.h"
#include "type-info.h"
#include "zone.h"
+#include "scopes.h"
namespace v8 {
namespace internal {
@@ -304,10 +305,13 @@
HConstant* GetConstantUndefined() const { return undefined_constant_.get(); }
HConstant* GetConstant0();
HConstant* GetConstant1();
+ HConstant* GetConstantSmi0();
+ HConstant* GetConstantSmi1();
HConstant* GetConstantMinus1();
HConstant* GetConstantTrue();
HConstant* GetConstantFalse();
HConstant* GetConstantHole();
+ HConstant* GetConstantNull();
HConstant* GetInvalidContext();
HBasicBlock* CreateBasicBlock();
@@ -395,6 +399,8 @@
private:
HConstant* GetConstantInt32(SetOncePointer<HConstant>* pointer,
int32_t integer_value);
+ HConstant* GetConstantSmi(SetOncePointer<HConstant>* pointer,
+ int32_t integer_value);
void MarkAsDeoptimizingRecursively(HBasicBlock* block);
void NullifyUnreachableInstructions();
@@ -424,10 +430,13 @@
SetOncePointer<HConstant> undefined_constant_;
SetOncePointer<HConstant> constant_0_;
SetOncePointer<HConstant> constant_1_;
+ SetOncePointer<HConstant> constant_smi_0_;
+ SetOncePointer<HConstant> constant_smi_1_;
SetOncePointer<HConstant> constant_minus1_;
SetOncePointer<HConstant> constant_true_;
SetOncePointer<HConstant> constant_false_;
SetOncePointer<HConstant> constant_the_hole_;
+ SetOncePointer<HConstant> constant_null_;
SetOncePointer<HConstant> constant_invalid_context_;
SetOncePointer<HArgumentsObject> arguments_object_;
@@ -890,7 +899,6 @@
HBasicBlock* false_branch,
int position) {
ASSERT(!continuation_captured_);
- ASSERT(true_branch != NULL || false_branch != NULL);
true_branch_ = true_branch;
false_branch_ = false_branch;
position_ = position;
@@ -940,6 +948,10 @@
HGraph* CreateGraph();
+ // Bailout environment manipulation.
+ void Push(HValue* value) { environment()->Push(value); }
+ HValue* Pop() { return environment()->Pop(); }
+
// Adding instructions.
HInstruction* AddInstruction(HInstruction* instr);
void AddSimulate(BailoutId id,
@@ -1125,6 +1137,8 @@
End();
}
+ void Return(HValue* value);
+
private:
void AddCompare(HControlInstruction* compare);
@@ -1203,6 +1217,46 @@
void BuildNewSpaceArrayCheck(HValue* length,
ElementsKind kind);
+ class JSArrayBuilder {
+ public:
+ JSArrayBuilder(HGraphBuilder* builder,
+ ElementsKind kind,
+ HValue* allocation_site_payload,
+ AllocationSiteMode mode);
+
+ HValue* AllocateEmptyArray();
+ HValue* AllocateArray(HValue* capacity, HValue* length_field,
+ bool fill_with_hole);
+ HValue* GetElementsLocation() { return elements_location_; }
+
+ private:
+ Zone* zone() const { return builder_->zone(); }
+ int elements_size() const {
+ return IsFastDoubleElementsKind(kind_) ? kDoubleSize : kPointerSize;
+ }
+ HInstruction* AddInstruction(HInstruction* instr) {
+ return builder_->AddInstruction(instr);
+ }
+ HGraphBuilder* builder() { return builder_; }
+ HGraph* graph() { return builder_->graph(); }
+ int initial_capacity() {
+ STATIC_ASSERT(JSArray::kPreallocatedArrayElements > 0);
+ return JSArray::kPreallocatedArrayElements;
+ }
+
+ HValue* EmitMapCode(HValue* context);
+ HValue* EstablishEmptyArrayAllocationSize();
+ HValue* EstablishAllocationSize(HValue* length_node);
+ HValue* AllocateArray(HValue* size_in_bytes, HValue* capacity,
+ HValue* length_field, bool fill_with_hole);
+
+ HGraphBuilder* builder_;
+ ElementsKind kind_;
+ AllocationSiteMode mode_;
+ HValue* allocation_site_payload_;
+ HInnerAllocatedObject* elements_location_;
+ };
+
HValue* BuildAllocateElements(HValue* context,
ElementsKind kind,
HValue* capacity);
@@ -1215,6 +1269,16 @@
ElementsKind kind,
HValue* capacity);
+ // array must have been allocated with enough room for
+ // 1) the JSArray, 2) an AllocationSiteInfo if mode requires it,
+ // 3) a FixedArray or FixedDoubleArray.
+ // A pointer to the Fixed(Double)Array is returned.
+ HInnerAllocatedObject* BuildJSArrayHeader(HValue* array,
+ HValue* array_map,
+ AllocationSiteMode mode,
+ HValue* allocation_site_payload,
+ HValue* length_field);
+
HValue* BuildGrowElementsCapacity(HValue* object,
HValue* elements,
ElementsKind kind,
@@ -1241,6 +1305,18 @@
ElementsKind kind,
int length);
+ void BuildCompareNil(
+ HValue* value,
+ EqualityKind kind,
+ CompareNilICStub::Types types,
+ Handle<Map> map,
+ int position,
+ HIfContinuation* continuation);
+
+ HValue* BuildCreateAllocationSiteInfo(HValue* previous_object,
+ int previous_object_size,
+ HValue* payload);
+
private:
HGraphBuilder();
CompilationInfo* info_;
@@ -1319,10 +1395,6 @@
void AddSoftDeoptimize();
- // Bailout environment manipulation.
- void Push(HValue* value) { environment()->Push(value); }
- HValue* Pop() { return environment()->Pop(); }
-
void Bailout(const char* reason);
HBasicBlock* CreateJoin(HBasicBlock* first,
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index 08bc227..c6e10f4 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -1207,9 +1207,9 @@
// that for a construct call the constructor function in edi needs to be
// preserved for entering the generic code. In both cases argc in eax needs to
// be preserved.
-static void ArrayNativeCode(MacroAssembler* masm,
- bool construct_call,
- Label* call_generic_code) {
+void ArrayNativeCode(MacroAssembler* masm,
+ bool construct_call,
+ Label* call_generic_code) {
Label argc_one_or_more, argc_two_or_more, prepare_generic_code_call,
empty_array, not_empty_array, finish, cant_transition_map, not_double;
@@ -1494,7 +1494,7 @@
}
-void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
+void Builtins::Generate_CommonArrayConstructCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : argc
// -- ebx : type info cell
@@ -1513,50 +1513,18 @@
__ Assert(not_zero, "Unexpected initial map for Array function");
__ CmpObjectType(ecx, MAP_TYPE, ecx);
__ Assert(equal, "Unexpected initial map for Array function");
-
- if (FLAG_optimize_constructed_arrays) {
- // We should either have undefined in ebx or a valid jsglobalpropertycell
- Label okay_here;
- Handle<Object> undefined_sentinel(
- masm->isolate()->heap()->undefined_value(), masm->isolate());
- Handle<Map> global_property_cell_map(
- masm->isolate()->heap()->global_property_cell_map());
- __ cmp(ebx, Immediate(undefined_sentinel));
- __ j(equal, &okay_here);
- __ cmp(FieldOperand(ebx, 0), Immediate(global_property_cell_map));
- __ Assert(equal, "Expected property cell in register ebx");
- __ bind(&okay_here);
- }
}
- if (FLAG_optimize_constructed_arrays) {
- Label not_zero_case, not_one_case;
- __ test(eax, eax);
- __ j(not_zero, &not_zero_case);
- ArrayNoArgumentConstructorStub no_argument_stub;
- __ TailCallStub(&no_argument_stub);
+ Label generic_constructor;
+ // Run the native code for the Array function called as constructor.
+ ArrayNativeCode(masm, true, &generic_constructor);
- __ bind(&not_zero_case);
- __ cmp(eax, 1);
- __ j(greater, &not_one_case);
- ArraySingleArgumentConstructorStub single_argument_stub;
- __ TailCallStub(&single_argument_stub);
-
- __ bind(&not_one_case);
- ArrayNArgumentsConstructorStub n_argument_stub;
- __ TailCallStub(&n_argument_stub);
- } else {
- Label generic_constructor;
- // Run the native code for the Array function called as constructor.
- ArrayNativeCode(masm, true, &generic_constructor);
-
- // Jump to the generic construct code in case the specialized code cannot
- // handle the construction.
- __ bind(&generic_constructor);
- Handle<Code> generic_construct_stub =
- masm->isolate()->builtins()->JSConstructStubGeneric();
- __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
- }
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
}
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 8c6801d..96d2411 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -101,16 +101,21 @@
}
-static void InitializeArrayConstructorDescriptor(Isolate* isolate,
- CodeStubInterfaceDescriptor* descriptor) {
+static void InitializeArrayConstructorDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor,
+ int constant_stack_parameter_count) {
// register state
- // edi -- constructor function
+ // eax -- number of arguments
// ebx -- type info cell with elements kind
- // eax -- number of arguments to the constructor function
- static Register registers[] = { edi, ebx };
- descriptor->register_param_count_ = 2;
- // stack param count needs (constructor pointer, and single argument)
- descriptor->stack_parameter_count_ = &eax;
+ static Register registers[] = { ebx };
+ descriptor->register_param_count_ = 1;
+
+ if (constant_stack_parameter_count != 0) {
+ // The stack parameter count needs to include the constructor pointer and the single argument.
+ descriptor->stack_parameter_count_ = &eax;
+ }
+ descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
descriptor->register_params_ = registers;
descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
descriptor->deoptimization_handler_ =
@@ -121,21 +126,34 @@
void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}
void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}
void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
+}
+
+
+void CompareNilICStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { eax };
+ descriptor->register_param_count_ = 1;
+ descriptor->register_params_ = registers;
+ descriptor->deoptimization_handler_ =
+ FUNCTION_ADDR(CompareNilIC_Miss);
+ descriptor->miss_handler_ =
+ ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate);
}
@@ -4952,6 +4970,9 @@
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
// It is important that the store buffer overflow stubs are generated first.
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
+ if (FLAG_optimize_constructed_arrays) {
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+ }
}
@@ -5038,12 +5059,17 @@
__ dec(Operand::StaticVariable(scope_depth));
}
- // Make sure we're not trying to return 'the hole' from the runtime
- // call as this may lead to crashes in the IC code later.
+ // Runtime functions should not return 'the hole'. Allowing it to escape may
+ // lead to crashes in the IC code later.
if (FLAG_debug_code) {
Label okay;
__ cmp(eax, masm->isolate()->factory()->the_hole_value());
__ j(not_equal, &okay, Label::kNear);
+ // TODO(wingo): Currently SuspendJSGeneratorObject returns the hole. Change
+ // to return another sentinel like a harmony symbol.
+ __ cmp(ebx, Immediate(ExternalReference(
+ Runtime::kSuspendJSGeneratorObject, masm->isolate())));
+ __ j(equal, &okay, Label::kNear);
__ int3();
__ bind(&okay);
}
@@ -5802,17 +5828,17 @@
__ ret(2 * kPointerSize);
__ bind(&non_ascii);
// At least one of the strings is two-byte. Check whether it happens
- // to contain only ASCII characters.
+ // to contain only one-byte characters.
// ecx: first instance type AND second instance type.
// edi: second instance type.
- __ test(ecx, Immediate(kAsciiDataHintMask));
+ __ test(ecx, Immediate(kOneByteDataHintMask));
__ j(not_zero, &ascii_data);
__ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
__ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
__ xor_(edi, ecx);
- STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
- __ and_(edi, kOneByteStringTag | kAsciiDataHintTag);
- __ cmp(edi, kOneByteStringTag | kAsciiDataHintTag);
+ STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
+ __ and_(edi, kOneByteStringTag | kOneByteDataHintTag);
+ __ cmp(edi, kOneByteStringTag | kOneByteDataHintTag);
__ j(equal, &ascii_data);
// Allocate a two byte cons string.
__ AllocateTwoByteConsString(ecx, edi, no_reg, &call_runtime);
@@ -7711,6 +7737,197 @@
__ ret(0);
}
+
+template<class T>
+static void CreateArrayDispatch(MacroAssembler* masm) {
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ cmp(edx, kind);
+ __ j(not_equal, &next);
+ T stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
+ // ebx - type info cell
+ // edx - kind
+ // eax - number of arguments
+ // edi - constructor?
+ // esp[0] - return address
+ // esp[4] - last argument
+ ASSERT(FAST_SMI_ELEMENTS == 0);
+ ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ ASSERT(FAST_ELEMENTS == 2);
+ ASSERT(FAST_HOLEY_ELEMENTS == 3);
+ ASSERT(FAST_DOUBLE_ELEMENTS == 4);
+ ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
+
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ // Is the low bit set? If so, we are holey and that is good.
+ __ test_b(edx, 1);
+ Label normal_sequence;
+ __ j(not_zero, &normal_sequence);
+
+ // Look at the first argument.
+ __ mov(ecx, Operand(esp, kPointerSize));
+ __ test(ecx, ecx);
+ __ j(zero, &normal_sequence);
+
+ // We are going to create a holey array, but our kind is non-holey.
+ // Fix the kind and retry.
+ __ inc(edx);
+ __ cmp(ebx, Immediate(undefined_sentinel));
+ __ j(equal, &normal_sequence);
+
+ // Save the resulting elements kind in the type info cell.
+ __ SmiTag(edx);
+ __ mov(FieldOperand(ebx, kPointerSize), edx);
+ __ SmiUntag(edx);
+
+ __ bind(&normal_sequence);
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ cmp(edx, kind);
+ __ j(not_equal, &next);
+ ArraySingleArgumentConstructorStub stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
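
The dispatch above leans on the ElementsKind numbering pinned down by the ASSERTs: each holey kind sits one past its packed counterpart, so "test_b(edx, 1)" asks "already holey?" and "inc(edx)" converts packed to holey. The same trick in C++:

#include <cassert>

enum ElementsKind {  // values match the ASSERTs above
  FAST_SMI_ELEMENTS = 0, FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2, FAST_HOLEY_ELEMENTS = 3,
  FAST_DOUBLE_ELEMENTS = 4, FAST_HOLEY_DOUBLE_ELEMENTS = 5
};

bool IsHoley(ElementsKind kind) { return (kind & 1) != 0; }

ElementsKind ToHoley(ElementsKind kind) {
  return IsHoley(kind) ? kind : static_cast<ElementsKind>(kind + 1);
}

int main() {
  assert(!IsHoley(FAST_SMI_ELEMENTS));
  assert(ToHoley(FAST_ELEMENTS) == FAST_HOLEY_ELEMENTS);
  assert(ToHoley(FAST_HOLEY_DOUBLE_ELEMENTS) == FAST_HOLEY_DOUBLE_ELEMENTS);
}
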
+
+
+template<class T>
+static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
+ int to_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= to_index; ++i) {
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ T stub(kind);
+ stub.GetCode(isolate)->set_is_pregenerated(true);
+ }
+}
+
+
+void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
+ isolate);
+}
+
+
+void ArrayConstructorStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- eax : argc (only if argument_count_ == ANY)
+ // -- ebx : type info cell
+ // -- edi : constructor
+ // -- esp[0] : return address
+ // -- esp[4] : last argument
+ // -----------------------------------
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ if (FLAG_debug_code) {
+ // The array construct code is only set for the global and natives
+ // builtin Array functions which always have maps.
+
+ // Initial map for the builtin Array function should be a map.
+ __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
+ // Will both indicate a NULL and a Smi.
+ __ test(ecx, Immediate(kSmiTagMask));
+ __ Assert(not_zero, "Unexpected initial map for Array function");
+ __ CmpObjectType(ecx, MAP_TYPE, ecx);
+ __ Assert(equal, "Unexpected initial map for Array function");
+
+ // We should either have undefined in ebx or a valid jsglobalpropertycell
+ Label okay_here;
+ Handle<Map> global_property_cell_map(
+ masm->isolate()->heap()->global_property_cell_map());
+ __ cmp(ebx, Immediate(undefined_sentinel));
+ __ j(equal, &okay_here);
+ __ cmp(FieldOperand(ebx, 0), Immediate(global_property_cell_map));
+ __ Assert(equal, "Expected property cell in register ebx");
+ __ bind(&okay_here);
+ }
+
+ if (FLAG_optimize_constructed_arrays) {
+ Label no_info, switch_ready;
+ // Get the elements kind and case on that.
+ __ cmp(ebx, Immediate(undefined_sentinel));
+ __ j(equal, &no_info);
+ __ mov(edx, FieldOperand(ebx, kPointerSize));
+
+ // There is no info if the call site went megamorphic, either.
+
+ // TODO(mvstanton): Really? I thought that if it was the array function,
+ // the cell wouldn't get stamped as megamorphic.
+ __ cmp(edx, Immediate(TypeFeedbackCells::MegamorphicSentinel(
+ masm->isolate())));
+ __ j(equal, &no_info);
+ __ SmiUntag(edx);
+ __ jmp(&switch_ready);
+ __ bind(&no_info);
+ __ mov(edx, Immediate(GetInitialFastElementsKind()));
+ __ bind(&switch_ready);
+
+ if (argument_count_ == ANY) {
+ Label not_zero_case, not_one_case;
+ __ test(eax, eax);
+ __ j(not_zero, &not_zero_case);
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+
+ __ bind(&not_zero_case);
+ __ cmp(eax, 1);
+ __ j(greater, &not_one_case);
+ CreateArrayDispatchOneArgument(masm);
+
+ __ bind(&not_one_case);
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else if (argument_count_ == NONE) {
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+ } else if (argument_count_ == ONE) {
+ CreateArrayDispatchOneArgument(masm);
+ } else if (argument_count_ == MORE_THAN_ONE) {
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else {
+ UNREACHABLE();
+ }
+ } else {
+ Label generic_constructor;
+ // Run the native code for the Array function called as constructor.
+ ArrayNativeCode(masm, true, &generic_constructor);
+
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
+ }
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/src/ia32/code-stubs-ia32.h b/src/ia32/code-stubs-ia32.h
index 07563cd..fbf1a68 100644
--- a/src/ia32/code-stubs-ia32.h
+++ b/src/ia32/code-stubs-ia32.h
@@ -36,6 +36,10 @@
namespace internal {
+void ArrayNativeCode(MacroAssembler* masm,
+ bool construct_call,
+ Label* call_generic_code);
+
// Compute a transcendental math function natively, or call the
// TranscendentalCache runtime function.
class TranscendentalCacheStub: public PlatformCodeStub {
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 113ca4b..f71a76d 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1937,6 +1937,102 @@
}
+void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
+ Expression *value,
+ JSGeneratorObject::ResumeMode resume_mode) {
+ // The value stays in eax, and is ultimately read by the resumed generator, as
+ // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. ebx
+ // will hold the generator object until the activation has been resumed.
+ VisitForStackValue(generator);
+ VisitForAccumulatorValue(value);
+ __ pop(ebx);
+
+ // Check generator state.
+ Label wrong_state, done;
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
+ __ cmp(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
+ Immediate(Smi::FromInt(0)));
+ __ j(less_equal, &wrong_state);
+
+ // Load suspended function and context.
+ __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
+ __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
+
+ // Push receiver.
+ __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
+
+ // Push holes for arguments to generator function.
+ __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ mov(edx,
+ FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ mov(ecx, isolate()->factory()->the_hole_value());
+ Label push_argument_holes, push_frame;
+ __ bind(&push_argument_holes);
+ __ sub(edx, Immediate(1));
+ __ j(carry, &push_frame);
+ __ push(ecx);
+ __ jmp(&push_argument_holes);
+
+ // Enter a new JavaScript frame, and initialize its slots as they were when
+ // the generator was suspended.
+ Label resume_frame;
+ __ bind(&push_frame);
+ __ call(&resume_frame);
+ __ jmp(&done);
+ __ bind(&resume_frame);
+ __ push(ebp); // Caller's frame pointer.
+ __ mov(ebp, esp);
+ __ push(esi); // Callee's context.
+ __ push(edi); // Callee's JS Function.
+
+ // Load the operand stack size.
+ __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
+ __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
+ __ SmiUntag(edx);
+
+ // If we are sending a value and there is no operand stack, we can jump back
+ // in directly.
+ if (resume_mode == JSGeneratorObject::SEND) {
+ Label slow_resume;
+ __ cmp(edx, Immediate(0));
+ __ j(not_zero, &slow_resume);
+ __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
+ __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
+ __ SmiUntag(ecx);
+ __ add(edx, ecx);
+ __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
+ Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
+ __ jmp(edx);
+ __ bind(&slow_resume);
+ }
+
+ // Otherwise, we push holes for the operand stack and call the runtime to fix
+ // up the stack and the handlers.
+ Label push_operand_holes, call_resume;
+ __ bind(&push_operand_holes);
+ __ sub(edx, Immediate(1));
+ __ j(carry, &call_resume);
+ __ push(ecx);
+ __ jmp(&push_operand_holes);
+ __ bind(&call_resume);
+ __ push(ebx);
+ __ push(result_register());
+ __ Push(Smi::FromInt(resume_mode));
+ __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
+ // Not reached: the runtime call returns elsewhere.
+ __ Abort("Generator failed to resume.");
+
+ // Throw error if we attempt to operate on a running generator.
+ __ bind(&wrong_state);
+ __ push(ebx);
+ __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
+
+ __ bind(&done);
+ context()->Plug(result_register());
+}
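
The resume path first classifies the generator by its continuation field: a positive value is a suspend offset it can jump back to, anything else is a sentinel meaning executing or closed (hence the single "less_equal" check). A sketch; the exact sentinel values here are assumptions consistent with the STATIC_ASSERTs above:

#include <cassert>

const int kGeneratorExecuting = 0;  // assumed; only "<= 0" is asserted
const int kGeneratorClosed = -1;    // assumed likewise

bool CanResume(int continuation) { return continuation > 0; }

int main() {
  assert(CanResume(42));                   // suspended at offset 42
  assert(!CanResume(kGeneratorExecuting)); // already running: throw
  assert(!CanResume(kGeneratorClosed));    // finished: throw
}
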
+
+
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Literal* key = prop->key()->AsLiteral();
@@ -4438,24 +4534,21 @@
VisitForAccumulatorValue(sub_expr);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
- Handle<Object> nil_value = nil == kNullValue ?
- isolate()->factory()->null_value() :
- isolate()->factory()->undefined_value();
- __ cmp(eax, nil_value);
- if (expr->op() == Token::EQ_STRICT) {
+
+ EqualityKind kind = expr->op() == Token::EQ_STRICT
+ ? kStrictEquality : kNonStrictEquality;
+ Handle<Object> nil_value = nil == kNullValue
+ ? isolate()->factory()->null_value()
+ : isolate()->factory()->undefined_value();
+ if (kind == kStrictEquality) {
+ __ cmp(eax, nil_value);
Split(equal, if_true, if_false, fall_through);
} else {
- Handle<Object> other_nil_value = nil == kNullValue ?
- isolate()->factory()->undefined_value() :
- isolate()->factory()->null_value();
- __ j(equal, if_true);
- __ cmp(eax, other_nil_value);
- __ j(equal, if_true);
- __ JumpIfSmi(eax, if_false);
- // It can be an undetectable object.
- __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
- __ movzx_b(edx, FieldOperand(edx, Map::kBitFieldOffset));
- __ test(edx, Immediate(1 << Map::kIsUndetectable));
+ Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(),
+ kNonStrictEquality,
+ nil);
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
+ __ test(eax, eax);
Split(not_zero, if_true, if_false, fall_through);
}
context()->Plug(if_true, if_false);
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 056891d..d93c27a 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -2777,6 +2777,8 @@
__ Ret((parameter_count + extra_value_count) * kPointerSize, ecx);
} else {
Register reg = ToRegister(instr->parameter_count());
+ // The argument count parameter is a smi.
+ __ SmiUntag(reg);
Register return_addr_reg = reg.is(ecx) ? ebx : ecx;
if (dynamic_frame_alignment && FLAG_debug_code) {
ASSERT(extra_value_count == 2);
@@ -4209,11 +4211,20 @@
ASSERT(ToRegister(instr->result()).is(eax));
ASSERT(FLAG_optimize_constructed_arrays);
- __ mov(ebx, instr->hydrogen()->property_cell());
- Handle<Code> array_construct_code =
- isolate()->builtins()->ArrayConstructCode();
__ Set(eax, Immediate(instr->arity()));
- CallCode(array_construct_code, RelocInfo::CONSTRUCT_CALL, instr);
+ __ mov(ebx, instr->hydrogen()->property_cell());
+ Object* cell_value = instr->hydrogen()->property_cell()->value();
+ ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+ if (instr->arity() == 0) {
+ ArrayNoArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else if (instr->arity() == 1) {
+ ArraySingleArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else {
+ ArrayNArgumentsConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ }
}
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index 494daa6..c023fd1 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -99,7 +99,7 @@
bool LInstruction::HasDoubleRegisterInput() {
for (int i = 0; i < InputCount(); i++) {
LOperand* op = InputAt(i);
- if (op->IsDoubleRegister()) {
+ if (op != NULL && op->IsDoubleRegister()) {
return true;
}
}
@@ -2516,6 +2516,7 @@
LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
info()->MarkAsDeferredCalling();
LOperand* context = UseAny(instr->context());
+ // TODO(mvstanton): Why can't size be a constant when possible?
LOperand* size = UseTempRegister(instr->size());
LOperand* temp = TempRegister();
LAllocate* result = new(zone()) LAllocate(context, size, temp);
@@ -2577,7 +2578,8 @@
ASSERT(info()->IsStub());
CodeStubInterfaceDescriptor* descriptor =
info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
- Register reg = descriptor->register_params_[instr->index()];
+ int index = static_cast<int>(instr->index());
+ Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index);
return DefineFixed(result, reg);
}
}
diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h
index 6bbf61b..49462cb 100644
--- a/src/ia32/lithium-ia32.h
+++ b/src/ia32/lithium-ia32.h
@@ -1486,6 +1486,7 @@
LOperand* parameter_count() { return inputs_[2]; }
DECLARE_CONCRETE_INSTRUCTION(Return, "return")
+ DECLARE_HYDROGEN_ACCESSOR(Return)
};
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index bad052c..733dbdb 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -1984,8 +1984,10 @@
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
- PrepareCallCFunction(0, eax);
- CallCFunction(ExternalReference::log_enter_external_function(isolate()), 0);
+ PrepareCallCFunction(1, eax);
+ mov(Operand(esp, 0),
+ Immediate(ExternalReference::isolate_address(isolate())));
+ CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
PopSafepointRegisters();
}
@@ -1995,8 +1997,10 @@
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
- PrepareCallCFunction(0, eax);
- CallCFunction(ExternalReference::log_leave_external_function(isolate()), 0);
+ PrepareCallCFunction(1, eax);
+ mov(Operand(esp, 0),
+ Immediate(ExternalReference::isolate_address(isolate())));
+ CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
PopSafepointRegisters();
}
diff --git a/src/ic.cc b/src/ic.cc
index e0ebddd..40676ab 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -347,6 +347,7 @@
case Code::CALL_IC: return CallIC::Clear(address, target);
case Code::KEYED_CALL_IC: return KeyedCallIC::Clear(address, target);
case Code::COMPARE_IC: return CompareIC::Clear(address, target);
+ case Code::COMPARE_NIL_IC: return CompareNilIC::Clear(address, target);
case Code::UNARY_OP_IC:
case Code::BINARY_OP_IC:
case Code::TO_BOOLEAN_IC:
@@ -2770,6 +2771,93 @@
}
+Code* CompareNilIC::GetRawUninitialized(EqualityKind kind,
+ NilValue nil) {
+ CompareNilICStub stub(kind, nil);
+ Code* code = NULL;
+ CHECK(stub.FindCodeInCache(&code, Isolate::Current()));
+ return code;
+}
+
+
+void CompareNilIC::Clear(Address address, Code* target) {
+ if (target->ic_state() == UNINITIALIZED) return;
+ Code::ExtraICState state = target->extended_extra_ic_state();
+
+ EqualityKind kind =
+ CompareNilICStub::EqualityKindFromExtraICState(state);
+ NilValue nil =
+ CompareNilICStub::NilValueFromExtraICState(state);
+
+ SetTargetAtAddress(address, GetRawUninitialized(kind, nil));
+}
+
+
+MaybeObject* CompareNilIC::DoCompareNilSlow(EqualityKind kind,
+ NilValue nil,
+ Handle<Object> object) {
+ if (kind == kStrictEquality) {
+ if (nil == kNullValue) {
+ return Smi::FromInt(object->IsNull());
+ } else {
+ return Smi::FromInt(object->IsUndefined());
+ }
+ }
+ if (object->IsNull() || object->IsUndefined()) {
+ return Smi::FromInt(true);
+ }
+ return Smi::FromInt(object->IsUndetectableObject());
+}
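
DoCompareNilSlow is the whole truth table: strict equality matches exactly one nil value, while non-strict "== null"/"== undefined" also accepts the other nil and undetectable objects. Restated over an enum of the interesting value classes (a sketch; the real code works on heap objects):

#include <cassert>

enum ValueClass { NIL_NULL, NIL_UNDEFINED, UNDETECTABLE, OTHER };

bool CompareNil(bool strict, bool against_null, ValueClass v) {
  if (strict) return v == (against_null ? NIL_NULL : NIL_UNDEFINED);
  // Non-strict: null and undefined compare equal to each other, and
  // undetectable objects (e.g. document.all) also answer true.
  return v == NIL_NULL || v == NIL_UNDEFINED || v == UNDETECTABLE;
}

int main() {
  assert(CompareNil(true, true, NIL_NULL));
  assert(!CompareNil(true, true, NIL_UNDEFINED));
  assert(CompareNil(false, true, UNDETECTABLE));
  assert(!CompareNil(false, true, OTHER));
}
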
+
+
+MaybeObject* CompareNilIC::CompareNil(Handle<Object> object) {
+ Code::ExtraICState extra_ic_state = target()->extended_extra_ic_state();
+
+ // Extract the current supported types from the patched IC and calculate what
+ // types must be supported as a result of the miss.
+ bool already_monomorphic;
+ CompareNilICStub::Types types =
+ CompareNilICStub::GetPatchedICFlags(extra_ic_state,
+ object, &already_monomorphic);
+
+ EqualityKind kind =
+ CompareNilICStub::EqualityKindFromExtraICState(extra_ic_state);
+ NilValue nil =
+ CompareNilICStub::NilValueFromExtraICState(extra_ic_state);
+
+ // Find or create the specialized stub to support the new set of types.
+ CompareNilICStub stub(kind, nil, types);
+ Handle<Code> code;
+ if ((types & CompareNilICStub::kCompareAgainstMonomorphicMap) != 0) {
+ Handle<Map> monomorphic_map(already_monomorphic
+ ? target()->FindFirstMap()
+ : HeapObject::cast(*object)->map());
+ code = isolate()->stub_cache()->ComputeCompareNil(monomorphic_map,
+ nil,
+ stub.GetTypes());
+ } else {
+ code = stub.GetCode(isolate());
+ }
+
+ patch(*code);
+
+ return DoCompareNilSlow(kind, nil, object);
+}
+
+
+void CompareNilIC::patch(Code* code) {
+ set_target(code);
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, CompareNilIC_Miss) {
+ HandleScope scope(isolate);
+ Handle<Object> object = args.at<Object>(0);
+ CompareNilIC ic(isolate);
+ return ic.CompareNil(object);
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Unreachable) {
UNREACHABLE();
CHECK(false);
diff --git a/src/ic.h b/src/ic.h
index 6c676e6..4bf259a 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -59,6 +59,7 @@
ICU(UnaryOp_Patch) \
ICU(BinaryOp_Patch) \
ICU(CompareIC_Miss) \
+ ICU(CompareNilIC_Miss) \
ICU(Unreachable) \
ICU(ToBoolean_Patch)
//
@@ -776,6 +777,26 @@
};
+class CompareNilIC: public IC {
+ public:
+ explicit CompareNilIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) {}
+
+ MUST_USE_RESULT MaybeObject* CompareNil(Handle<Object> object);
+
+ static Handle<Code> GetUninitialized();
+
+ static Code* GetRawUninitialized(EqualityKind kind, NilValue nil);
+
+ static void Clear(Address address, Code* target);
+
+ void patch(Code* code);
+
+ static MUST_USE_RESULT MaybeObject* DoCompareNilSlow(EqualityKind kind,
+ NilValue nil,
+ Handle<Object> object);
+};
+
+
class ToBooleanIC: public IC {
public:
explicit ToBooleanIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { }
@@ -790,6 +811,8 @@
DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissFromStubFailure);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissFromStubFailure);
+DECLARE_RUNTIME_FUNCTION(MaybeObject*, CompareNilIC_Miss);
+
} } // namespace v8::internal
diff --git a/src/isolate.cc b/src/isolate.cc
index 82589a1..79a9020 100644
--- a/src/isolate.cc
+++ b/src/isolate.cc
@@ -930,7 +930,7 @@
HandleScope scope(this);
Handle<JSObject> receiver_handle(receiver);
Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this);
- { VMState state(this, EXTERNAL);
+ { VMState<EXTERNAL> state(this);
thread_local_top()->failed_access_check_callback_(
v8::Utils::ToLocal(receiver_handle),
type,
@@ -1009,7 +1009,7 @@
bool result = false;
{
// Leaving JavaScript.
- VMState state(this, EXTERNAL);
+ VMState<EXTERNAL> state(this);
result = callback(v8::Utils::ToLocal(receiver_handle),
v8::Utils::ToLocal(key_handle),
type,
@@ -1051,7 +1051,7 @@
bool result = false;
{
// Leaving JavaScript.
- VMState state(this, EXTERNAL);
+ VMState<EXTERNAL> state(this);
result = callback(v8::Utils::ToLocal(receiver_handle),
index,
type,
@@ -2051,7 +2051,7 @@
logger_ = new Logger(this);
}
if (counters_ == NULL) {
- counters_ = new Counters;
+ counters_ = new Counters(this);
}
}
@@ -2116,7 +2116,7 @@
heap_profiler_ = new HeapProfiler(heap());
// Enable logging before setting up the heap
- logger_->SetUp();
+ logger_->SetUp(this);
// Initialize other runtime facilities
#if defined(USE_SIMULATOR)
@@ -2243,6 +2243,8 @@
DONT_TRACK_ALLOCATION_SITE, 0);
stub.InitializeInterfaceDescriptor(
this, code_stub_interface_descriptor(CodeStub::FastCloneShallowArray));
+ CompareNilICStub::InitializeForIsolate(this);
+ ArrayConstructorStubBase::InstallDescriptors(this);
}
if (FLAG_parallel_recompilation) optimizing_compiler_thread_.Start();
diff --git a/src/isolate.h b/src/isolate.h
index fe5e571..71d86f4 100644
--- a/src/isolate.h
+++ b/src/isolate.h
@@ -85,7 +85,7 @@
class ThreadManager;
class ThreadState;
class ThreadVisitor; // Defined in v8threads.h
-class VMState;
+template <StateTag Tag> class VMState;
// 'void function pointer', used to roundtrip the
// ExternalReference::ExternalReferenceRedirector since we can not include
@@ -1031,7 +1031,7 @@
return thread_local_top_.current_vm_state_;
}
- void SetCurrentVMState(StateTag state) {
+ void set_current_vm_state(StateTag state) {
thread_local_top_.current_vm_state_ = state;
}
diff --git a/src/list-inl.h b/src/list-inl.h
index 408859e..d815a7e 100644
--- a/src/list-inl.h
+++ b/src/list-inl.h
@@ -216,7 +216,7 @@
template<typename T, class P>
void List<T, P>::Sort() {
- Sort(PointerValueCompare<T>);
+ ToVector().Sort();
}
diff --git a/src/lithium.cc b/src/lithium.cc
index 58e6aa6..10d7f71 100644
--- a/src/lithium.cc
+++ b/src/lithium.cc
@@ -329,7 +329,6 @@
can_eliminate = false;
}
}
-
if (can_eliminate) {
label->set_replacement(GetLabel(goto_instr->block_id()));
}
diff --git a/src/log-utils.cc b/src/log-utils.cc
index cef7dba..a44dca0 100644
--- a/src/log-utils.cc
+++ b/src/log-utils.cc
@@ -79,13 +79,8 @@
FLAG_prof_auto = false;
}
- bool open_log_file = FLAG_log || FLAG_log_runtime || FLAG_log_api
- || FLAG_log_code || FLAG_log_gc || FLAG_log_handles || FLAG_log_suspect
- || FLAG_log_regexp || FLAG_log_state_changes || FLAG_ll_prof
- || FLAG_log_internal_timer_events;
-
// If we're logging anything, we need to open the log file.
- if (open_log_file) {
+ if (Log::InitLogAtStart()) {
if (strcmp(FLAG_logfile, "-") == 0) {
OpenStdout();
} else if (strcmp(FLAG_logfile, kLogToTemporaryFile) == 0) {
diff --git a/src/log-utils.h b/src/log-utils.h
index d0cb828..a1867f2 100644
--- a/src/log-utils.h
+++ b/src/log-utils.h
@@ -44,6 +44,12 @@
// Disables logging, but preserves acquired resources.
void stop() { is_stopped_ = true; }
+ static bool InitLogAtStart() {
+ return FLAG_log || FLAG_log_runtime || FLAG_log_api
+ || FLAG_log_code || FLAG_log_gc || FLAG_log_handles || FLAG_log_suspect
+ || FLAG_log_regexp || FLAG_ll_prof || FLAG_log_internal_timer_events;
+ }
+
// Frees all resources acquired in Initialize and Open... functions.
// When a temporary file is used for the log, returns its stream descriptor,
// leaving the file open.
diff --git a/src/log.cc b/src/log.cc
index 57abdef..e52d0f3 100644
--- a/src/log.cc
+++ b/src/log.cc
@@ -335,15 +335,6 @@
void AppendString(String* str) {
if (str == NULL) return;
- if (str->HasOnlyAsciiChars()) {
- int utf8_length = Min(str->length(), kUtf8BufferSize - utf8_pos_);
- String::WriteToFlat(str,
- reinterpret_cast<uint8_t*>(utf8_buffer_ + utf8_pos_),
- 0,
- utf8_length);
- utf8_pos_ += utf8_length;
- return;
- }
int uc16_length = Min(str->length(), kUtf16BufferSize);
String::WriteToFlat(str, utf16_buffer, 0, uc16_length);
int previous = unibrow::Utf16::kNoPreviousCharacter;
@@ -658,13 +649,17 @@
}
-void Logger::EnterExternal() {
- LOG(ISOLATE, TimerEvent(START, TimerEventScope::v8_external));
+void Logger::EnterExternal(Isolate* isolate) {
+ LOG(isolate, TimerEvent(START, TimerEventScope::v8_external));
+ ASSERT(isolate->current_vm_state() == JS);
+ isolate->set_current_vm_state(EXTERNAL);
}
-void Logger::LeaveExternal() {
- LOG(ISOLATE, TimerEvent(END, TimerEventScope::v8_external));
+void Logger::LeaveExternal(Isolate* isolate) {
+ LOG(isolate, TimerEvent(END, TimerEventScope::v8_external));
+ ASSERT(isolate->current_vm_state() == EXTERNAL);
+ isolate->set_current_vm_state(JS);
}
@@ -1595,6 +1590,7 @@
case Code::UNARY_OP_IC: // fall through
case Code::BINARY_OP_IC: // fall through
case Code::COMPARE_IC: // fall through
+ case Code::COMPARE_NIL_IC: // fall through
case Code::TO_BOOLEAN_IC: // fall through
case Code::STUB:
description =
@@ -1816,7 +1812,7 @@
}
-bool Logger::SetUp() {
+bool Logger::SetUp(Isolate* isolate) {
// Tests and EnsureInitialize() can call this twice in a row. It's harmless.
if (is_initialized_) return true;
is_initialized_ = true;
@@ -1832,23 +1828,13 @@
FLAG_prof_auto = false;
}
- // TODO(isolates): this assert introduces cyclic dependency (logger
- // -> thread local top -> heap -> logger).
- // ASSERT(VMState::is_outermost_external());
-
log_->Initialize();
if (FLAG_ll_prof) LogCodeInfo();
- Isolate* isolate = Isolate::Current();
ticker_ = new Ticker(isolate, kSamplingIntervalMs);
- bool start_logging = FLAG_log || FLAG_log_runtime || FLAG_log_api
- || FLAG_log_code || FLAG_log_gc || FLAG_log_handles || FLAG_log_suspect
- || FLAG_log_regexp || FLAG_log_state_changes || FLAG_ll_prof
- || FLAG_log_internal_timer_events;
-
- if (start_logging) {
+ if (Log::InitLogAtStart()) {
logging_nesting_ = 1;
}
diff --git a/src/log.h b/src/log.h
index 2683330..8db13df 100644
--- a/src/log.h
+++ b/src/log.h
@@ -163,7 +163,7 @@
#undef DECLARE_ENUM
// Acquires resources for logging if the right flags are set.
- bool SetUp();
+ bool SetUp(Isolate* isolate);
// Sets the current code event handler.
void SetCodeEventHandler(uint32_t options,
@@ -292,8 +292,8 @@
void TimerEvent(StartEnd se, const char* name);
- static void EnterExternal();
- static void LeaveExternal();
+ static void EnterExternal(Isolate* isolate);
+ static void LeaveExternal(Isolate* isolate);
class TimerEventScope {
public:
@@ -466,7 +466,7 @@
friend class LogMessageBuilder;
friend class TimeLog;
friend class Profiler;
- friend class VMState;
+ template <StateTag Tag> friend class VMState;
friend class LoggerTestHelper;
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index f49179f..62dee48 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -1939,14 +1939,14 @@
ImplicitRefGroup* entry = ref_groups->at(i);
ASSERT(entry != NULL);
- if (!IsMarked(*entry->parent_)) {
+ if (!IsMarked(*entry->parent)) {
(*ref_groups)[last++] = entry;
continue;
}
- Object*** children = entry->children_;
+ Object*** children = entry->children;
// A parent object is marked, so mark all child heap objects.
- for (size_t j = 0; j < entry->length_; ++j) {
+ for (size_t j = 0; j < entry->length; ++j) {
if ((*children[j])->IsHeapObject()) {
HeapObject* child = HeapObject::cast(*children[j]);
MarkBit mark = Marking::MarkBitFrom(child);
@@ -1956,7 +1956,7 @@
// Once the entire group has been marked, dispose it because it's
// not needed anymore.
- entry->Dispose();
+ delete entry;
}
ref_groups->Rewind(last);
}
@@ -3125,6 +3125,8 @@
void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
+ Heap::RelocationLock relocation_lock(heap());
+
bool code_slots_filtering_required;
{ GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
code_slots_filtering_required = MarkInvalidatedCode();
diff --git a/src/math.js b/src/math.js
index e5ab70c..9ba1934 100644
--- a/src/math.js
+++ b/src/math.js
@@ -213,6 +213,13 @@
return %_MathTan(x);
}
+// Non-standard extension.
+function MathImul(x, y) {
+ if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
+ if (!IS_NUMBER(y)) y = NonNumberToNumber(y);
+ return %NumberImul(x, y);
+}
+
// -------------------------------------------------------------------
@@ -283,7 +290,8 @@
"atan2", MathAtan2,
"pow", MathPow,
"max", MathMax,
- "min", MathMin
+ "min", MathMin,
+ "imul", MathImul
));
}
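
Math.imul implements a C-style 32-bit multiply: both operands are truncated
to int32 and the low 32 bits of the product come back as a signed value. A
sketch of the arithmetic %NumberImul is expected to perform, assuming the
ToInt32 truncation has already been applied:

    #include <cstdint>

    int32_t Imul(int32_t x, int32_t y) {
      // Multiply as unsigned for well-defined wraparound, then reinterpret
      // the low 32 bits as a signed value.
      return static_cast<int32_t>(static_cast<uint32_t>(x) *
                                  static_cast<uint32_t>(y));
    }
    // Imul(0x7fffffff, 2) == -2, matching Math.imul(0x7fffffff, 2) in JS.
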
diff --git a/src/messages.js b/src/messages.js
index 67fe3cc..15a39b7 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -31,6 +31,8 @@
// Error
cyclic_proto: ["Cyclic __proto__ value"],
code_gen_from_strings: ["%0"],
+ generator_running: ["Generator is already running"],
+ generator_finished: ["Generator has already finished"],
// TypeError
unexpected_token: ["Unexpected token ", "%0"],
unexpected_token_number: ["Unexpected number"],
@@ -158,7 +160,7 @@
symbol_to_string: ["Conversion from symbol to string"],
invalid_module_path: ["Module does not export '", "%0", "', or export is not itself a module"],
module_type_error: ["Module '", "%0", "' used improperly"],
- module_export_undefined: ["Export '", "%0", "' is not defined in module"],
+ module_export_undefined: ["Export '", "%0", "' is not defined in module"]
};
diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc
index 1901f9c..700bcc4 100644
--- a/src/mips/builtins-mips.cc
+++ b/src/mips/builtins-mips.cc
@@ -317,8 +317,7 @@
// entering the generic code. In both cases argc in a0 needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// construct call and normal call.
-static void ArrayNativeCode(MacroAssembler* masm,
- Label* call_generic_code) {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code) {
Counters* counters = masm->isolate()->counters();
Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
has_non_smi_element, finish, cant_transition_map, not_double;
@@ -546,7 +545,7 @@
}
-void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
+void Builtins::Generate_CommonArrayConstructCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : number of arguments
// -- a1 : constructor function
@@ -566,48 +565,17 @@
__ GetObjectType(a3, a3, t0);
__ Assert(eq, "Unexpected initial map for Array function (4)",
t0, Operand(MAP_TYPE));
-
- if (FLAG_optimize_constructed_arrays) {
- // We should either have undefined in a2 or a valid jsglobalpropertycell
- Label okay_here;
- Handle<Object> undefined_sentinel(
- masm->isolate()->heap()->undefined_value(), masm->isolate());
- Handle<Map> global_property_cell_map(
- masm->isolate()->heap()->global_property_cell_map());
- __ Branch(&okay_here, eq, a2, Operand(undefined_sentinel));
- __ lw(a3, FieldMemOperand(a2, 0));
- __ Assert(eq, "Expected property cell in register a3",
- a3, Operand(global_property_cell_map));
- __ bind(&okay_here);
- }
}
+ Label generic_constructor;
+ // Run the native code for the Array function called as a constructor.
+ ArrayNativeCode(masm, &generic_constructor);
- if (FLAG_optimize_constructed_arrays) {
- Label not_zero_case, not_one_case;
- __ Branch(&not_zero_case, ne, a0, Operand(zero_reg));
- ArrayNoArgumentConstructorStub no_argument_stub;
- __ TailCallStub(&no_argument_stub);
-
- __ bind(&not_zero_case);
- __ Branch(&not_one_case, gt, a0, Operand(1));
- ArraySingleArgumentConstructorStub single_argument_stub;
- __ TailCallStub(&single_argument_stub);
-
- __ bind(&not_one_case);
- ArrayNArgumentsConstructorStub n_argument_stub;
- __ TailCallStub(&n_argument_stub);
- } else {
- Label generic_constructor;
- // Run the native code for the Array function called as a constructor.
- ArrayNativeCode(masm, &generic_constructor);
-
- // Jump to the generic construct code in case the specialized code cannot
- // handle the construction.
- __ bind(&generic_constructor);
- Handle<Code> generic_construct_stub =
- masm->isolate()->builtins()->JSConstructStubGeneric();
- __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
- }
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index cc1cf4e..733c369 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -97,16 +97,33 @@
}
-static void InitializeArrayConstructorDescriptor(Isolate* isolate,
+void CompareNilICStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { a0 };
+ descriptor->register_param_count_ = 1;
+ descriptor->register_params_ = registers;
+ descriptor->deoptimization_handler_ =
+ FUNCTION_ADDR(CompareNilIC_Miss);
+ descriptor->miss_handler_ =
+ ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate);
+}
+
+
+static void InitializeArrayConstructorDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor,
+ int constant_stack_parameter_count) {
// register state
- // a1 -- constructor function
+ // a0 -- number of arguments
// a2 -- type info cell with elements kind
- // a0 -- number of arguments to the constructor function
- static Register registers[] = { a1, a2 };
- descriptor->register_param_count_ = 2;
- // stack param count needs (constructor pointer, and single argument)
- descriptor->stack_parameter_count_ = &a0;
+ static Register registers[] = { a2 };
+ descriptor->register_param_count_ = 1;
+ if (constant_stack_parameter_count != 0) {
+ // stack param count needs (constructor pointer, and single argument)
+ descriptor->stack_parameter_count_ = &a0;
+ }
+ descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
descriptor->register_params_ = registers;
descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
descriptor->deoptimization_handler_ =
@@ -117,21 +134,21 @@
void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}
void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}
void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}
@@ -3329,6 +3346,9 @@
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
+ if (FLAG_optimize_constructed_arrays) {
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+ }
}
@@ -5083,7 +5103,7 @@
Handle<Object> terminal_kind_sentinel =
TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
LAST_FAST_ELEMENTS_KIND);
- __ Branch(&miss, ne, a3, Operand(terminal_kind_sentinel));
+ __ Branch(&miss, gt, a3, Operand(terminal_kind_sentinel));
// Make sure the function is the Array() function
__ LoadArrayFunction(a3);
__ Branch(&megamorphic, ne, a1, Operand(a3));
@@ -7549,6 +7569,189 @@
}
+template<class T>
+static void CreateArrayDispatch(MacroAssembler* masm) {
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ Branch(&next, ne, a3, Operand(kind));
+ T stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
+ // a2 - type info cell
+ // a3 - kind
+ // a0 - number of arguments
+ // a1 - constructor?
+ // sp[0] - last argument
+ ASSERT(FAST_SMI_ELEMENTS == 0);
+ ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ ASSERT(FAST_ELEMENTS == 2);
+ ASSERT(FAST_HOLEY_ELEMENTS == 3);
+ ASSERT(FAST_DOUBLE_ELEMENTS == 4);
+ ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
+
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ // Is the low bit set? If so, we are holey and that is good.
+ Label normal_sequence;
+ __ And(at, a3, Operand(1));
+ __ Branch(&normal_sequence, ne, at, Operand(zero_reg));
+
+ // look at the first argument
+ __ lw(t1, MemOperand(sp, 0));
+ __ Branch(&normal_sequence, eq, t1, Operand(zero_reg));
+
+ // We are going to create a holey array, but our kind is non-holey.
+ // Fix kind and retry
+ __ Addu(a3, a3, Operand(1));
+ __ Branch(&normal_sequence, eq, a2, Operand(undefined_sentinel));
+
+ // Save the resulting elements kind in type info
+ __ SmiTag(a3);
+ __ sw(a3, FieldMemOperand(a2, kPointerSize));
+ __ SmiUntag(a3);
+
+ __ bind(&normal_sequence);
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ Branch(&next, ne, a3, Operand(kind));
+ ArraySingleArgumentConstructorStub stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+template<class T>
+static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
+ int to_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= to_index; ++i) {
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ T stub(kind);
+ stub.GetCode(isolate)->set_is_pregenerated(true);
+ }
+}
+
+
+void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
+ isolate);
+}
+
+
+void ArrayConstructorStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- a0 : argc (only if argument_count_ == ANY)
+ // -- a1 : constructor
+ // -- a2 : type info cell
+ // -- sp[0] : return address
+ // -- sp[4] : last argument
+ // -----------------------------------
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ if (FLAG_debug_code) {
+ // The array construct code is only set for the global and natives
+ // builtin Array functions which always have maps.
+
+ // Initial map for the builtin Array function should be a map.
+ __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
+ // This check will catch both a NULL and a Smi.
+ __ And(at, a3, Operand(kSmiTagMask));
+ __ Assert(ne, "Unexpected initial map for Array function",
+ at, Operand(zero_reg));
+ __ GetObjectType(a3, a3, t0);
+ __ Assert(eq, "Unexpected initial map for Array function",
+ t0, Operand(MAP_TYPE));
+
+ // We should either have undefined in a2 or a valid jsglobalpropertycell
+ Label okay_here;
+ Handle<Map> global_property_cell_map(
+ masm->isolate()->heap()->global_property_cell_map());
+ __ Branch(&okay_here, eq, a2, Operand(undefined_sentinel));
+ __ lw(a3, FieldMemOperand(a2, 0));
+ __ Assert(eq, "Expected property cell in register ebx",
+ a3, Operand(global_property_cell_map));
+ __ bind(&okay_here);
+ }
+
+ if (FLAG_optimize_constructed_arrays) {
+ Label no_info, switch_ready;
+ // Get the elements kind and case on that.
+ __ Branch(&no_info, eq, a2, Operand(undefined_sentinel));
+ __ lw(a3, FieldMemOperand(a2, kPointerSize));
+
+ // There is no info if the call site went megamorphic either.
+ // TODO(mvstanton): Really? I thought if it was the array function that
+ // the cell wouldn't get stamped as megamorphic.
+ __ Branch(&no_info, eq, a3,
+ Operand(TypeFeedbackCells::MegamorphicSentinel(masm->isolate())));
+ __ SmiUntag(a3);
+ __ jmp(&switch_ready);
+ __ bind(&no_info);
+ __ li(a3, Operand(GetInitialFastElementsKind()));
+ __ bind(&switch_ready);
+
+ if (argument_count_ == ANY) {
+ Label not_zero_case, not_one_case;
+ __ And(at, a0, a0);
+ __ Branch(&not_zero_case, ne, at, Operand(zero_reg));
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+
+ __ bind(&not_zero_case);
+ __ Branch(&not_one_case, gt, a0, Operand(1));
+ CreateArrayDispatchOneArgument(masm);
+
+ __ bind(&not_one_case);
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else if (argument_count_ == NONE) {
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+ } else if (argument_count_ == ONE) {
+ CreateArrayDispatchOneArgument(masm);
+ } else if (argument_count_ == MORE_THAN_ONE) {
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else {
+ UNREACHABLE();
+ }
+ } else {
+ Label generic_constructor;
+ // Run the native code for the Array function called as a constructor.
+ ArrayNativeCode(masm, &generic_constructor);
+
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
+ }
+}
+
+
#undef __
} } // namespace v8::internal
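
CreateArrayDispatch<T> above emits one compare-and-tail-call branch per fast
ElementsKind, and the one-argument variant leans on packed kinds being even
and holey kinds odd (hence the low-bit test and the single add to upgrade).
The dispatch shape in plain C++, with an assumed Stub interface:

    #include <cstdio>

    enum ElementsKind {
      FAST_SMI_ELEMENTS,           // 0: packed
      FAST_HOLEY_SMI_ELEMENTS,     // 1: holey (low bit set)
      FAST_ELEMENTS,               // 2
      FAST_HOLEY_ELEMENTS,         // 3
      FAST_DOUBLE_ELEMENTS,        // 4
      FAST_HOLEY_DOUBLE_ELEMENTS,  // 5
      TERMINAL_FAST_KIND = FAST_HOLEY_DOUBLE_ELEMENTS
    };

    template <class Stub>
    void CreateArrayDispatch(ElementsKind kind) {
      for (int i = 0; i <= TERMINAL_FAST_KIND; ++i) {
        if (kind == i) {
          Stub stub(kind);
          stub.TailCall();  // Stands in for __ TailCallStub(&stub).
          return;
        }
      }
      std::fprintf(stderr, "Unexpected ElementsKind in array constructor\n");
    }
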
diff --git a/src/mips/code-stubs-mips.h b/src/mips/code-stubs-mips.h
index 2370d45..3a84644 100644
--- a/src/mips/code-stubs-mips.h
+++ b/src/mips/code-stubs-mips.h
@@ -35,6 +35,9 @@
namespace internal {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
+
+
// Compute a transcendental math function natively, or call the
// TranscendentalCache runtime function.
class TranscendentalCacheStub: public PlatformCodeStub {
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index 884b62a..a6fd39a 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -1980,6 +1980,102 @@
}
+void FullCodeGenerator::EmitGeneratorResume(Expression* generator,
+ Expression* value,
+ JSGeneratorObject::ResumeMode resume_mode) {
+ // The value stays in a0, and is ultimately read by the resumed generator, as
+ // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. a1
+ // will hold the generator object until the activation has been resumed.
+ VisitForStackValue(generator);
+ VisitForAccumulatorValue(value);
+ __ pop(a1);
+
+ // Check generator state.
+ Label wrong_state, done;
+ __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
+ __ Branch(&wrong_state, le, a3, Operand(zero_reg));
+
+ // Load suspended function and context.
+ __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
+ __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
+
+ // Load receiver and store as the first argument.
+ __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
+ __ push(a2);
+
+ // Push holes for the rest of the arguments to the generator function.
+ __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
+ __ lw(a3,
+ FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
+ Label push_argument_holes, push_frame;
+ __ bind(&push_argument_holes);
+ __ Subu(a3, a3, Operand(1));
+ __ Branch(&push_frame, lt, a3, Operand(zero_reg));
+ __ push(a2);
+ __ jmp(&push_argument_holes);
+
+ // Enter a new JavaScript frame, and initialize its slots as they were when
+ // the generator was suspended.
+ Label resume_frame;
+ __ bind(&push_frame);
+ __ Call(&resume_frame);
+ __ jmp(&done);
+ __ bind(&resume_frame);
+ __ push(ra); // Return address.
+ __ push(fp); // Caller's frame pointer.
+ __ mov(fp, sp);
+ __ push(cp); // Callee's context.
+ __ push(t0); // Callee's JS Function.
+
+ // Load the operand stack size.
+ __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
+ __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
+ __ SmiUntag(a3);
+
+ // If we are sending a value and there is no operand stack, we can jump back
+ // in directly.
+ if (resume_mode == JSGeneratorObject::SEND) {
+ Label slow_resume;
+ __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
+ __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
+ __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
+ __ SmiUntag(a2);
+ __ Addu(a3, a3, Operand(a2));
+ __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
+ __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
+ __ Jump(a3);
+ __ bind(&slow_resume);
+ }
+
+ // Otherwise, we push holes for the operand stack and call the runtime to fix
+ // up the stack and the handlers.
+ Label push_operand_holes, call_resume;
+ __ bind(&push_operand_holes);
+ __ Subu(a3, a3, Operand(1));
+ __ Branch(&call_resume, lt, a3, Operand(zero_reg));
+ __ push(a2);
+ __ b(&push_operand_holes);
+ __ bind(&call_resume);
+ __ push(a1);
+ __ push(result_register());
+ __ Push(Smi::FromInt(resume_mode));
+ __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
+ // Not reached: the runtime call returns elsewhere.
+ __ stop("not-reached");
+
+ // Throw error if we attempt to operate on a running generator.
+ __ bind(&wrong_state);
+ __ push(a1);
+ __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
+
+ __ bind(&done);
+ context()->Plug(result_register());
+}
+
+
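
The generator's continuation field packs its state into one integer: a
positive value is a resumable code offset, and the executing/closed sentinels
are both non-positive, which is why the single signed comparison against zero
above suffices. A sketch, with assumed sentinel values (the STATIC_ASSERTs
only require that both are <= 0):

    const int kGeneratorExecuting = -1;
    const int kGeneratorClosed = 0;

    bool CanResume(int continuation) {
      return continuation > 0;  // Positive: suspended at this code offset.
    }
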
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Literal* key = prop->key()->AsLiteral();
@@ -4452,26 +4548,21 @@
VisitForAccumulatorValue(sub_expr);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
- Heap::RootListIndex nil_value = nil == kNullValue ?
- Heap::kNullValueRootIndex :
- Heap::kUndefinedValueRootIndex;
+ EqualityKind kind = expr->op() == Token::EQ_STRICT
+ ? kStrictEquality : kNonStrictEquality;
__ mov(a0, result_register());
- __ LoadRoot(a1, nil_value);
- if (expr->op() == Token::EQ_STRICT) {
+ if (kind == kStrictEquality) {
+ Heap::RootListIndex nil_value = nil == kNullValue ?
+ Heap::kNullValueRootIndex :
+ Heap::kUndefinedValueRootIndex;
+ __ LoadRoot(a1, nil_value);
Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
} else {
- Heap::RootListIndex other_nil_value = nil == kNullValue ?
- Heap::kUndefinedValueRootIndex :
- Heap::kNullValueRootIndex;
- __ Branch(if_true, eq, a0, Operand(a1));
- __ LoadRoot(a1, other_nil_value);
- __ Branch(if_true, eq, a0, Operand(a1));
- __ JumpIfSmi(a0, if_false);
- // It can be an undetectable object.
- __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
- __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
- __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
- Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
+ Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(),
+ kNonStrictEquality,
+ nil);
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
+ Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
}
context()->Plug(if_true, if_false);
}
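
The split above reflects the JS semantics: a strict nil comparison matches
exactly one root value, while the sloppy case must accept null, undefined,
and undetectable host objects, the set the new CompareNilIC learns. Those
semantics restated as a C++ sketch with stand-in value tags:

    enum NilValue { kNullValue, kUndefinedValue };
    enum ValueTag { kNull, kUndefined, kUndetectable, kOther };

    // Stand-in for a JS value, reduced to the properties nil checks care about.
    struct Value { ValueTag tag; };

    bool CompareNil(Value v, NilValue nil, bool strict) {
      if (strict) {
        // 'v === null' / 'v === undefined': one root comparison suffices.
        return v.tag == (nil == kNullValue ? kNull : kUndefined);
      }
      // 'v == null' and 'v == undefined' are equivalent: both accept null,
      // undefined, and "undetectable" host objects (e.g. document.all).
      return v.tag == kNull || v.tag == kUndefined || v.tag == kUndetectable;
    }
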
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index d8742a6..0c2983f 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -2546,20 +2546,21 @@
if (NeedsEagerFrame()) {
__ mov(sp, fp);
__ Pop(ra, fp);
-
- if (instr->has_constant_parameter_count()) {
- int parameter_count = ToInteger32(instr->constant_parameter_count());
- int32_t sp_delta = (parameter_count + 1) * kPointerSize;
- if (sp_delta != 0) {
- __ Addu(sp, sp, Operand(sp_delta));
- }
- } else {
- Register reg = ToRegister(instr->parameter_count());
- __ Addu(reg, reg, Operand(1));
- __ sll(at, reg, kPointerSizeLog2);
- __ Addu(sp, sp, at);
- }
}
+ if (instr->has_constant_parameter_count()) {
+ int parameter_count = ToInteger32(instr->constant_parameter_count());
+ int32_t sp_delta = (parameter_count + 1) * kPointerSize;
+ if (sp_delta != 0) {
+ __ Addu(sp, sp, Operand(sp_delta));
+ }
+ } else {
+ Register reg = ToRegister(instr->parameter_count());
+ // The argument count parameter is a smi.
+ __ SmiUntag(reg);
+ __ sll(at, reg, kPointerSizeLog2);
+ __ Addu(sp, sp, at);
+ }
+
__ Jump(ra);
}
@@ -3890,10 +3891,18 @@
__ li(a0, Operand(instr->arity()));
__ li(a2, Operand(instr->hydrogen()->property_cell()));
- Handle<Code> array_construct_code =
- isolate()->builtins()->ArrayConstructCode();
-
- CallCode(array_construct_code, RelocInfo::CONSTRUCT_CALL, instr);
+ Object* cell_value = instr->hydrogen()->property_cell()->value();
+ ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+ if (instr->arity() == 0) {
+ ArrayNoArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else if (instr->arity() == 1) {
+ ArraySingleArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else {
+ ArrayNArgumentsConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ }
}
diff --git a/src/mips/lithium-mips.cc b/src/mips/lithium-mips.cc
index 41487f6..c2f8986 100644
--- a/src/mips/lithium-mips.cc
+++ b/src/mips/lithium-mips.cc
@@ -2306,7 +2306,8 @@
ASSERT(info()->IsStub());
CodeStubInterfaceDescriptor* descriptor =
info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
- Register reg = descriptor->register_params_[instr->index()];
+ int index = static_cast<int>(instr->index());
+ Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index);
return DefineFixed(result, reg);
}
}
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 66cb7e4..220d9fe 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -3925,8 +3925,9 @@
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
- PrepareCallCFunction(0, a0);
- CallCFunction(ExternalReference::log_enter_external_function(isolate()), 0);
+ PrepareCallCFunction(1, a0);
+ li(a0, Operand(ExternalReference::isolate_address(isolate())));
+ CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
PopSafepointRegisters();
}
@@ -3945,8 +3946,9 @@
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
- PrepareCallCFunction(0, a0);
- CallCFunction(ExternalReference::log_leave_external_function(isolate()), 0);
+ PrepareCallCFunction(1, a0);
+ li(a0, Operand(ExternalReference::isolate_address(isolate())));
+ CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
PopSafepointRegisters();
}
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 32adf09..ee6df1d 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -416,6 +416,7 @@
// initialized by the generator. Hence these weak checks.
VerifyObjectField(kFunctionOffset);
VerifyObjectField(kContextOffset);
+ VerifyObjectField(kReceiverOffset);
VerifyObjectField(kOperandStackOffset);
VerifyObjectField(kContinuationOffset);
}
diff --git a/src/objects-inl.h b/src/objects-inl.h
index a14fccb..08378f1 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -355,14 +355,14 @@
}
-bool String::HasOnlyAsciiChars() {
+bool String::HasOnlyOneByteChars() {
uint32_t type = map()->instance_type();
- return (type & kAsciiDataHintMask) == kAsciiDataHintTag;
+ return (type & kOneByteDataHintMask) == kOneByteDataHintTag;
}
bool String::IsOneByteConvertible() {
- return HasOnlyAsciiChars() || IsOneByteRepresentation();
+ return HasOnlyOneByteChars() || IsOneByteRepresentation();
}
@@ -1299,6 +1299,44 @@
}
+bool JSObject::ShouldTrackAllocationInfo() {
+ if (map()->CanTrackAllocationSite()) {
+ if (!IsJSArray()) {
+ return true;
+ }
+
+ return AllocationSiteInfo::GetMode(GetElementsKind()) ==
+ TRACK_ALLOCATION_SITE;
+ }
+ return false;
+}
+
+
+// Heuristic: We only need to create allocation site info if the boilerplate
+// elements kind is the initial elements kind.
+AllocationSiteMode AllocationSiteInfo::GetMode(
+ ElementsKind boilerplate_elements_kind) {
+ if (FLAG_track_allocation_sites &&
+ IsFastSmiElementsKind(boilerplate_elements_kind)) {
+ return TRACK_ALLOCATION_SITE;
+ }
+
+ return DONT_TRACK_ALLOCATION_SITE;
+}
+
+
+AllocationSiteMode AllocationSiteInfo::GetMode(ElementsKind from,
+ ElementsKind to) {
+ if (FLAG_track_allocation_sites &&
+ IsFastSmiElementsKind(from) &&
+ (IsFastObjectElementsKind(to) || IsFastDoubleElementsKind(to))) {
+ return TRACK_ALLOCATION_SITE;
+ }
+
+ return DONT_TRACK_ALLOCATION_SITE;
+}
+
+
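
Assuming FLAG_track_allocation_sites is on, the two predicates above reduce
to a small table; tracking only pays off for boilerplates that start in the
smi-only kinds or transition out of them to object or double elements:

    GetMode(FAST_SMI_ELEMENTS)                           == TRACK_ALLOCATION_SITE
    GetMode(FAST_ELEMENTS)                               == DONT_TRACK_ALLOCATION_SITE
    GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS)     == TRACK_ALLOCATION_SITE
    GetMode(FAST_SMI_ELEMENTS, FAST_HOLEY_SMI_ELEMENTS)  == DONT_TRACK_ALLOCATION_SITE
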
MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
ValidateElements();
ElementsKind elements_kind = map()->elements_kind();
@@ -3634,6 +3672,12 @@
}
+Code::ExtraICState Code::extended_extra_ic_state() {
+ ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
+ return ExtractExtendedExtraICStateFromFlags(flags());
+}
+
+
Code::StubType Code::type() {
return ExtractTypeFromFlags(flags());
}
@@ -3663,6 +3707,7 @@
kind() == UNARY_OP_IC ||
kind() == BINARY_OP_IC ||
kind() == COMPARE_IC ||
+ kind() == COMPARE_NIL_IC ||
kind() == LOAD_IC ||
kind() == KEYED_LOAD_IC ||
kind() == TO_BOOLEAN_IC);
@@ -3676,6 +3721,7 @@
kind() == UNARY_OP_IC ||
kind() == BINARY_OP_IC ||
kind() == COMPARE_IC ||
+ kind() == COMPARE_NIL_IC ||
kind() == LOAD_IC ||
kind() == KEYED_LOAD_IC ||
kind() == STORE_IC ||
@@ -3689,7 +3735,7 @@
bool Code::is_pregenerated() {
- return kind() == STUB && IsPregeneratedField::decode(flags());
+ return (kind() == STUB && IsPregeneratedField::decode(flags()));
}
@@ -3940,13 +3986,23 @@
int argc,
InlineCacheHolderFlag holder) {
ASSERT(argc <= Code::kMaxArguments);
+ // Since the extended extra ic state overlaps with the argument count
+ // for CALL_ICs, do some checks to make sure that they don't interfere.
+ ASSERT((kind != Code::CALL_IC &&
+ kind != Code::KEYED_CALL_IC) ||
+ (ExtraICStateField::encode(extra_ic_state) | true));
// Compute the bit mask.
unsigned int bits = KindField::encode(kind)
| ICStateField::encode(ic_state)
| TypeField::encode(type)
- | ExtraICStateField::encode(extra_ic_state)
- | (argc << kArgumentsCountShift)
+ | ExtendedExtraICStateField::encode(extra_ic_state)
| CacheHolderField::encode(holder);
+ // TODO(danno): This is a bit of a hack right now since there are still
+ // clients of this API that pass "extra" values in for argc. These clients
+ // should be retrofitted to use ExtendedExtraICState.
+ if (kind != Code::COMPARE_NIL_IC) {
+ bits |= (argc << kArgumentsCountShift);
+ }
return static_cast<Flags>(bits);
}
@@ -3975,6 +4031,12 @@
}
+Code::ExtraICState Code::ExtractExtendedExtraICStateFromFlags(
+ Flags flags) {
+ return ExtendedExtraICStateField::decode(flags);
+}
+
+
Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
return TypeField::decode(flags);
}
@@ -5030,6 +5092,7 @@
ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
+ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
@@ -5124,7 +5187,8 @@
int Code::stub_info() {
- ASSERT(kind() == COMPARE_IC || kind() == BINARY_OP_IC || kind() == LOAD_IC);
+ ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
+ kind() == BINARY_OP_IC || kind() == LOAD_IC);
Object* value = READ_FIELD(this, kTypeFeedbackInfoOffset);
return Smi::cast(value)->value();
}
@@ -5132,6 +5196,7 @@
void Code::set_stub_info(int value) {
ASSERT(kind() == COMPARE_IC ||
+ kind() == COMPARE_NIL_IC ||
kind() == BINARY_OP_IC ||
kind() == STUB ||
kind() == LOAD_IC ||
diff --git a/src/objects-printer.cc b/src/objects-printer.cc
index f21481a..5aeeec6 100644
--- a/src/objects-printer.cc
+++ b/src/objects-printer.cc
@@ -495,11 +495,11 @@
return "CONS_STRING";
case EXTERNAL_STRING_TYPE:
case EXTERNAL_ASCII_STRING_TYPE:
- case EXTERNAL_STRING_WITH_ASCII_DATA_TYPE:
+ case EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
return "EXTERNAL_STRING";
case SHORT_EXTERNAL_STRING_TYPE:
case SHORT_EXTERNAL_ASCII_STRING_TYPE:
- case SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE:
+ case SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
return "SHORT_EXTERNAL_STRING";
case INTERNALIZED_STRING_TYPE: return "INTERNALIZED_STRING";
case ASCII_INTERNALIZED_STRING_TYPE: return "ASCII_INTERNALIZED_STRING";
@@ -508,11 +508,11 @@
return "CONS_ASCII_INTERNALIZED_STRING";
case EXTERNAL_INTERNALIZED_STRING_TYPE:
case EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE:
- case EXTERNAL_INTERNALIZED_STRING_WITH_ASCII_DATA_TYPE:
+ case EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE:
return "EXTERNAL_INTERNALIZED_STRING";
case SHORT_EXTERNAL_INTERNALIZED_STRING_TYPE:
case SHORT_EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE:
- case SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ASCII_DATA_TYPE:
+ case SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE:
return "SHORT_EXTERNAL_INTERNALIZED_STRING";
case FIXED_ARRAY_TYPE: return "FIXED_ARRAY";
case BYTE_ARRAY_TYPE: return "BYTE_ARRAY";
diff --git a/src/objects.cc b/src/objects.cc
index c5ad8ea..128c04d 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -344,7 +344,7 @@
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = call_fun(v8::Utils::ToLocal(key), info);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
@@ -1130,21 +1130,21 @@
this->set_map_no_write_barrier(
is_internalized
? (is_ascii
- ? heap->external_internalized_string_with_ascii_data_map()
+ ? heap->external_internalized_string_with_one_byte_data_map()
: heap->external_internalized_string_map())
: (is_ascii
- ? heap->external_string_with_ascii_data_map()
+ ? heap->external_string_with_one_byte_data_map()
: heap->external_string_map()));
} else {
this->set_map_no_write_barrier(
is_internalized
- ? (is_ascii
- ? heap->
- short_external_internalized_string_with_ascii_data_map()
- : heap->short_external_internalized_string_map())
- : (is_ascii
- ? heap->short_external_string_with_ascii_data_map()
- : heap->short_external_string_map()));
+ ? (is_ascii
+ ? heap->
+ short_external_internalized_string_with_one_byte_data_map()
+ : heap->short_external_internalized_string_map())
+ : (is_ascii
+ ? heap->short_external_string_with_one_byte_data_map()
+ : heap->short_external_string_map()));
}
ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
self->set_resource(resource);
@@ -2119,7 +2119,7 @@
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
Handle<Object> value_unhole(value->IsTheHole() ?
isolate->heap()->undefined_value() :
value,
@@ -2230,7 +2230,7 @@
v8::AccessorInfo info(args.end());
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
call_fun(v8::Utils::ToLocal(key),
v8::Utils::ToLocal(value_handle),
info);
@@ -3278,9 +3278,11 @@
} else {
LookupResult new_lookup(isolate);
self->LocalLookup(*name, &new_lookup, true);
- if (new_lookup.IsDataProperty() &&
- !Object::GetProperty(self, name)->SameValue(*old_value)) {
- EnqueueChangeRecord(self, "updated", name, old_value);
+ if (new_lookup.IsDataProperty()) {
+ Handle<Object> new_value = Object::GetProperty(self, name);
+ if (!new_value->SameValue(*old_value)) {
+ EnqueueChangeRecord(self, "updated", name, old_value);
+ }
}
}
}
@@ -3430,8 +3432,11 @@
} else {
LookupResult new_lookup(isolate);
self->LocalLookup(*name, &new_lookup, true);
- bool value_changed = new_lookup.IsDataProperty() &&
- !old_value->SameValue(*Object::GetProperty(self, name));
+ bool value_changed = false;
+ if (new_lookup.IsDataProperty()) {
+ Handle<Object> new_value = Object::GetProperty(self, name);
+ value_changed = !old_value->SameValue(*new_value);
+ }
if (new_lookup.GetAttributes() != old_attributes) {
if (!value_changed) old_value = isolate->factory()->the_hole_value();
EnqueueChangeRecord(self, "reconfigured", name, old_value);
@@ -3494,7 +3499,7 @@
v8::Handle<v8::Integer> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = query(v8::Utils::ToLocal(name_handle), info);
}
if (!result.IsEmpty()) {
@@ -3509,7 +3514,7 @@
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = getter(v8::Utils::ToLocal(name_handle), info);
}
if (!result.IsEmpty()) return DONT_ENUM;
@@ -3635,7 +3640,7 @@
v8::Handle<v8::Integer> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = query(index, info);
}
if (!result.IsEmpty())
@@ -3648,7 +3653,7 @@
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = getter(index, info);
}
if (!result.IsEmpty()) return NONE;
@@ -4318,7 +4323,7 @@
v8::Handle<v8::Boolean> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = deleter(v8::Utils::ToLocal(name_handle), info);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
@@ -4355,7 +4360,7 @@
v8::Handle<v8::Boolean> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = deleter(index, info);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
@@ -7920,31 +7925,6 @@
}
-// Heuristic: We only need to create allocation site info if the boilerplate
-// elements kind is the initial elements kind.
-AllocationSiteMode AllocationSiteInfo::GetMode(
- ElementsKind boilerplate_elements_kind) {
- if (FLAG_track_allocation_sites &&
- IsFastSmiElementsKind(boilerplate_elements_kind)) {
- return TRACK_ALLOCATION_SITE;
- }
-
- return DONT_TRACK_ALLOCATION_SITE;
-}
-
-
-AllocationSiteMode AllocationSiteInfo::GetMode(ElementsKind from,
- ElementsKind to) {
- if (FLAG_track_allocation_sites &&
- IsFastSmiElementsKind(from) &&
- (IsFastObjectElementsKind(to) || IsFastDoubleElementsKind(to))) {
- return TRACK_ALLOCATION_SITE;
- }
-
- return DONT_TRACK_ALLOCATION_SITE;
-}
-
-
uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
// For array indexes mix the length into the hash as an array index could
// be zero.
@@ -8389,13 +8369,13 @@
}
-MUST_USE_RESULT static MaybeObject* CacheInitialJSArrayMaps(
+static MUST_USE_RESULT MaybeObject* CacheInitialJSArrayMaps(
Context* native_context, Map* initial_map) {
// Replace all of the cached initial array maps in the native context with
// the appropriate transitioned elements kind maps.
Heap* heap = native_context->GetHeap();
MaybeObject* maybe_maps =
- heap->AllocateFixedArrayWithHoles(kElementsKindCount);
+ heap->AllocateFixedArrayWithHoles(kElementsKindCount, TENURED);
FixedArray* maps;
if (!maybe_maps->To(&maps)) return maybe_maps;
@@ -8418,6 +8398,14 @@
}
+Handle<Object> CacheInitialJSArrayMaps(Handle<Context> native_context,
+ Handle<Map> initial_map) {
+ CALL_HEAP_FUNCTION(native_context->GetIsolate(),
+ CacheInitialJSArrayMaps(*native_context, *initial_map),
+ Object);
+}
+
+
MaybeObject* JSFunction::SetInstancePrototype(Object* value) {
ASSERT(value->IsJSReceiver());
Heap* heap = GetHeap();
@@ -9021,6 +9009,12 @@
VisitExternalReferences(p, p + 1);
}
+byte Code::compare_nil_state() {
+ ASSERT(is_compare_nil_ic_stub());
+ return CompareNilICStub::TypesFromExtraICState(extended_extra_ic_state());
+}
+
+
void Code::InvalidateRelocation() {
set_relocation_info(GetHeap()->empty_byte_array());
}
@@ -9157,6 +9151,22 @@
}
+void Code::ReplaceFirstMap(Map* replace_with) {
+ ASSERT(is_inline_cache_stub());
+ AssertNoAllocation no_allocation;
+ int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
+ for (RelocIterator it(this, mask); !it.done(); it.next()) {
+ RelocInfo* info = it.rinfo();
+ Object* object = info->target_object();
+ if (object->IsMap()) {
+ info->set_target_object(replace_with);
+ return;
+ }
+ }
+ UNREACHABLE();
+}
+
+
void Code::FindAllMaps(MapHandleList* maps) {
ASSERT(is_inline_cache_stub());
AssertNoAllocation no_allocation;
@@ -9352,6 +9362,7 @@
case UNARY_OP_IC: return "UNARY_OP_IC";
case BINARY_OP_IC: return "BINARY_OP_IC";
case COMPARE_IC: return "COMPARE_IC";
+ case COMPARE_NIL_IC: return "COMPARE_NIL_IC";
case TO_BOOLEAN_IC: return "TO_BOOLEAN_IC";
}
UNREACHABLE();
@@ -10266,7 +10277,7 @@
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = setter(index, v8::Utils::ToLocal(value_handle), info);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
@@ -10309,7 +10320,7 @@
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = call_fun(v8::Utils::ToLocal(key), info);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
@@ -10375,7 +10386,7 @@
v8::AccessorInfo info(args.end());
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
call_fun(v8::Utils::ToLocal(key),
v8::Utils::ToLocal(value_handle),
info);
@@ -10951,8 +10962,8 @@
} else if (old_value->IsTheHole()) {
EnqueueChangeRecord(self, "reconfigured", name, old_value);
} else {
- bool value_changed =
- !old_value->SameValue(*Object::GetElement(self, index));
+ Handle<Object> new_value = Object::GetElement(self, index);
+ bool value_changed = !old_value->SameValue(*new_value);
if (old_attributes != new_attributes) {
if (!value_changed) old_value = isolate->factory()->the_hole_value();
EnqueueChangeRecord(self, "reconfigured", name, old_value);
@@ -11255,7 +11266,7 @@
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = getter(index, info);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
@@ -11565,7 +11576,7 @@
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
result = getter(v8::Utils::ToLocal(name_handle), info);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
diff --git a/src/objects.h b/src/objects.h
index 8de3c69..e32c41b 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -330,10 +330,10 @@
V(SLICED_STRING_TYPE) \
V(EXTERNAL_STRING_TYPE) \
V(EXTERNAL_ASCII_STRING_TYPE) \
- V(EXTERNAL_STRING_WITH_ASCII_DATA_TYPE) \
+ V(EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE) \
V(SHORT_EXTERNAL_STRING_TYPE) \
V(SHORT_EXTERNAL_ASCII_STRING_TYPE) \
- V(SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE) \
+ V(SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE) \
\
V(INTERNALIZED_STRING_TYPE) \
V(ASCII_INTERNALIZED_STRING_TYPE) \
@@ -341,10 +341,10 @@
V(CONS_ASCII_INTERNALIZED_STRING_TYPE) \
V(EXTERNAL_INTERNALIZED_STRING_TYPE) \
V(EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE) \
- V(EXTERNAL_INTERNALIZED_STRING_WITH_ASCII_DATA_TYPE) \
+ V(EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE) \
V(SHORT_EXTERNAL_INTERNALIZED_STRING_TYPE) \
V(SHORT_EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE) \
- V(SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ASCII_DATA_TYPE) \
+ V(SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE) \
\
V(SYMBOL_TYPE) \
V(MAP_TYPE) \
@@ -461,10 +461,10 @@
ExternalAsciiString::kSize, \
external_ascii_string, \
ExternalAsciiString) \
- V(EXTERNAL_STRING_WITH_ASCII_DATA_TYPE, \
+ V(EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE, \
ExternalTwoByteString::kSize, \
- external_string_with_ascii_data, \
- ExternalStringWithAsciiData) \
+ external_string_with_one_byte_data, \
+ ExternalStringWithOneByteData) \
V(SHORT_EXTERNAL_STRING_TYPE, \
ExternalTwoByteString::kShortSize, \
short_external_string, \
@@ -473,10 +473,10 @@
ExternalAsciiString::kShortSize, \
short_external_ascii_string, \
ShortExternalAsciiString) \
- V(SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE, \
+ V(SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE, \
ExternalTwoByteString::kShortSize, \
- short_external_string_with_ascii_data, \
- ShortExternalStringWithAsciiData) \
+ short_external_string_with_one_byte_data, \
+ ShortExternalStringWithOneByteData) \
\
V(INTERNALIZED_STRING_TYPE, \
kVariableSizeSentinel, \
@@ -502,10 +502,10 @@
ExternalAsciiString::kSize, \
external_ascii_internalized_string, \
ExternalAsciiInternalizedString) \
- V(EXTERNAL_INTERNALIZED_STRING_WITH_ASCII_DATA_TYPE, \
+ V(EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE, \
ExternalTwoByteString::kSize, \
- external_internalized_string_with_ascii_data, \
- ExternalInternalizedStringWithAsciiData) \
+ external_internalized_string_with_one_byte_data, \
+ ExternalInternalizedStringWithOneByteData) \
V(SHORT_EXTERNAL_INTERNALIZED_STRING_TYPE, \
ExternalTwoByteString::kShortSize, \
short_external_internalized_string, \
@@ -514,10 +514,10 @@
ExternalAsciiString::kShortSize, \
short_external_ascii_internalized_string, \
ShortExternalAsciiInternalizedString) \
- V(SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ASCII_DATA_TYPE, \
+ V(SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE, \
ExternalTwoByteString::kShortSize, \
- short_external_internalized_string_with_ascii_data, \
- ShortExternalInternalizedStringWithAsciiData) \
+ short_external_internalized_string_with_one_byte_data, \
+ ShortExternalInternalizedStringWithOneByteData) \
// A struct is a simple object with a set of object-valued fields. Including an
// object type in this causes the compiler to generate most of the boilerplate
@@ -605,9 +605,9 @@
STATIC_ASSERT(IS_POWER_OF_TWO(kSlicedNotConsMask) && kSlicedNotConsMask != 0);
// If bit 7 is clear, then bit 3 indicates whether this two-byte
-// string actually contains ASCII data.
-const uint32_t kAsciiDataHintMask = 0x08;
-const uint32_t kAsciiDataHintTag = 0x08;
+// string actually contains one byte data.
+const uint32_t kOneByteDataHintMask = 0x08;
+const uint32_t kOneByteDataHintTag = 0x08;
// If bit 7 is clear and string representation indicates an external string,
// then bit 4 indicates whether the data pointer is cached.
@@ -637,13 +637,13 @@
SLICED_ASCII_STRING_TYPE = kOneByteStringTag | kSlicedStringTag,
EXTERNAL_STRING_TYPE = kTwoByteStringTag | kExternalStringTag,
EXTERNAL_ASCII_STRING_TYPE = kOneByteStringTag | kExternalStringTag,
- EXTERNAL_STRING_WITH_ASCII_DATA_TYPE =
- EXTERNAL_STRING_TYPE | kAsciiDataHintTag,
+ EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE =
+ EXTERNAL_STRING_TYPE | kOneByteDataHintTag,
SHORT_EXTERNAL_STRING_TYPE = EXTERNAL_STRING_TYPE | kShortExternalStringTag,
SHORT_EXTERNAL_ASCII_STRING_TYPE =
EXTERNAL_ASCII_STRING_TYPE | kShortExternalStringTag,
- SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE =
- EXTERNAL_STRING_WITH_ASCII_DATA_TYPE | kShortExternalStringTag,
+ SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE =
+ EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE | kShortExternalStringTag,
INTERNALIZED_STRING_TYPE = STRING_TYPE | kInternalizedTag,
ASCII_INTERNALIZED_STRING_TYPE = ASCII_STRING_TYPE | kInternalizedTag,
@@ -653,14 +653,14 @@
EXTERNAL_INTERNALIZED_STRING_TYPE = EXTERNAL_STRING_TYPE | kInternalizedTag,
EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE =
EXTERNAL_ASCII_STRING_TYPE | kInternalizedTag,
- EXTERNAL_INTERNALIZED_STRING_WITH_ASCII_DATA_TYPE =
- EXTERNAL_STRING_WITH_ASCII_DATA_TYPE | kInternalizedTag,
+ EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE =
+ EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE | kInternalizedTag,
SHORT_EXTERNAL_INTERNALIZED_STRING_TYPE =
SHORT_EXTERNAL_STRING_TYPE | kInternalizedTag,
SHORT_EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE =
SHORT_EXTERNAL_ASCII_STRING_TYPE | kInternalizedTag,
- SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ASCII_DATA_TYPE =
- SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE | kInternalizedTag,
+ SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE =
+ SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE | kInternalizedTag,
// Non-string names
SYMBOL_TYPE = kNotStringTag, // LAST_NAME_TYPE, FIRST_NONSTRING_TYPE
@@ -1738,6 +1738,8 @@
bool HasDictionaryArgumentsElements();
inline SeededNumberDictionary* element_dictionary(); // Gets slow elements.
+ inline bool ShouldTrackAllocationInfo();
+
inline void set_map_and_elements(
Map* map,
FixedArrayBase* value,
@@ -4354,6 +4356,7 @@
V(UNARY_OP_IC) \
V(BINARY_OP_IC) \
V(COMPARE_IC) \
+ V(COMPARE_NIL_IC) \
V(TO_BOOLEAN_IC)
enum Kind {
@@ -4465,6 +4468,8 @@
inline Kind kind();
inline InlineCacheState ic_state(); // Only valid for IC stubs.
inline ExtraICState extra_ic_state(); // Only valid for IC stubs.
+ inline ExtraICState extended_extra_ic_state(); // Only valid for
+ // non-call IC stubs.
inline StubType type(); // Only valid for monomorphic IC stubs.
inline int arguments_count(); // Only valid for call IC stubs.
@@ -4480,6 +4485,7 @@
inline bool is_unary_op_stub() { return kind() == UNARY_OP_IC; }
inline bool is_binary_op_stub() { return kind() == BINARY_OP_IC; }
inline bool is_compare_ic_stub() { return kind() == COMPARE_IC; }
+ inline bool is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; }
inline bool is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
// [major_key]: For kind STUB or BINARY_OP_IC, the major key.
@@ -4558,6 +4564,9 @@
inline byte to_boolean_state();
inline void set_to_boolean_state(byte value);
+ // [compare_nil]: For kind COMPARE_NIL_IC tells what state the stub is in.
+ byte compare_nil_state();
+
// [has_function_cache]: For kind STUB tells whether there is a function
// cache is passed to the stub.
inline bool has_function_cache();
@@ -4577,6 +4586,7 @@
// Find the first map in an IC stub.
Map* FindFirstMap();
void FindAllMaps(MapHandleList* maps);
+ void ReplaceFirstMap(Map* replace);
// Find the first code in an IC stub.
Code* FindFirstCode();
@@ -4629,6 +4639,7 @@
static inline Kind ExtractKindFromFlags(Flags flags);
static inline InlineCacheHolderFlag ExtractCacheHolderFromFlags(Flags flags);
static inline ExtraICState ExtractExtraICStateFromFlags(Flags flags);
+ static inline ExtraICState ExtractExtendedExtraICStateFromFlags(Flags flags);
static inline int ExtractArgumentsCountFromFlags(Flags flags);
static inline Flags RemoveTypeFromFlags(Flags flags);
@@ -4768,8 +4779,11 @@
class TypeField: public BitField<StubType, 3, 3> {};
class CacheHolderField: public BitField<InlineCacheHolderFlag, 6, 1> {};
class KindField: public BitField<Kind, 7, 4> {};
- class ExtraICStateField: public BitField<ExtraICState, 11, 5> {};
- class IsPregeneratedField: public BitField<bool, 16, 1> {};
+ class IsPregeneratedField: public BitField<bool, 11, 1> {};
+ class ExtraICStateField: public BitField<ExtraICState, 12, 5> {};
+ class ExtendedExtraICStateField: public BitField<ExtraICState, 12,
+ PlatformSmiTagging::kSmiValueSize - 12 + 1> {}; // NOLINT
+ STATIC_ASSERT(ExtraICStateField::kShift == ExtendedExtraICStateField::kShift);
// KindSpecificFlags1 layout (STUB and OPTIMIZED_FUNCTION)
static const int kStackSlotsFirstBit = 0;
@@ -4842,6 +4856,13 @@
PlatformSmiTagging::kSmiValueSize - Code::kArgumentsCountShift + 1;
static const int kMaxArguments = (1 << kArgumentsBits) - 1;
+ // ICs can use either the argument count or ExtendedExtraICState, since
+ // their storage overlaps.
+ STATIC_ASSERT(ExtraICStateField::kShift +
+ ExtraICStateField::kSize + kArgumentsBits ==
+ ExtendedExtraICStateField::kShift +
+ ExtendedExtraICStateField::kSize);
+
// This constant should be encodable in an ARM instruction.
static const int kFlagsNotUsedInLookup =
TypeField::kMask | CacheHolderField::kMask;
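
The layout trick here: both fields share a shift, and the extended field
simply widens into the bits otherwise holding the argument count, so a Code
object uses one encoding or the other, never both. A minimal BitField-style
sketch of such an overlap (widths illustrative):

    #include <cstdint>

    template <typename T, int shift, int size>
    struct BitField {
      static const uint32_t kMask = ((1u << size) - 1) << shift;
      static uint32_t encode(T value) {
        return static_cast<uint32_t>(value) << shift;
      }
      static T decode(uint32_t flags) {
        return static_cast<T>((flags & kMask) >> shift);
      }
    };

    typedef BitField<int, 12, 5> ExtraState;           // 5 bits at shift 12.
    typedef BitField<int, 12, 20> ExtendedExtraState;  // Same shift, wider.
    // Encoding an argc alongside ExtendedExtraState would clobber it, which
    // is what the COMPARE_NIL_IC special case in ComputeFlags avoids.
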
@@ -5658,7 +5679,8 @@
V(Math, pow, MathPow) \
V(Math, random, MathRandom) \
V(Math, max, MathMax) \
- V(Math, min, MathMin)
+ V(Math, min, MathMin) \
+ V(Math, imul, MathImul)
enum BuiltinFunctionId {
@@ -6301,6 +6323,9 @@
// [context]: The context of the suspended computation.
DECL_ACCESSORS(context, Context)
+ // [receiver]: The receiver of the suspended computation.
+ DECL_ACCESSORS(receiver, Object)
+
// [continuation]: Offset into code of continuation.
//
// A positive offset indicates a suspended generator. The special
@@ -6326,10 +6351,14 @@
// Layout description.
static const int kFunctionOffset = JSObject::kHeaderSize;
static const int kContextOffset = kFunctionOffset + kPointerSize;
- static const int kContinuationOffset = kContextOffset + kPointerSize;
+ static const int kReceiverOffset = kContextOffset + kPointerSize;
+ static const int kContinuationOffset = kReceiverOffset + kPointerSize;
static const int kOperandStackOffset = kContinuationOffset + kPointerSize;
static const int kSize = kOperandStackOffset + kPointerSize;
+ // Resume mode, for use by runtime functions.
+ enum ResumeMode { SEND, THROW };
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(JSGeneratorObject);
};
@@ -7270,9 +7299,9 @@
// Returns NULL if no AllocationSiteInfo is available for object.
static AllocationSiteInfo* FindForJSObject(JSObject* object);
-
- static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind);
- static AllocationSiteMode GetMode(ElementsKind from, ElementsKind to);
+ static inline AllocationSiteMode GetMode(
+ ElementsKind boilerplate_elements_kind);
+ static inline AllocationSiteMode GetMode(ElementsKind from, ElementsKind to);
static const int kPayloadOffset = HeapObject::kHeaderSize;
static const int kSize = kPayloadOffset + kPointerSize;
@@ -7607,7 +7636,7 @@
// NOTE: this should be considered only a hint. False negatives are
// possible.
- inline bool HasOnlyAsciiChars();
+ inline bool HasOnlyOneByteChars();
inline bool IsOneByteConvertible();
@@ -8724,6 +8753,10 @@
};
+Handle<Object> CacheInitialJSArrayMaps(Handle<Context> native_context,
+ Handle<Map> initial_map);
+
+
// JSRegExpResult is just a JSArray with a specific initial map.
// This initial map adds in-object properties for "index" and "input"
// properties, as assigned by RegExp.prototype.exec, which allows
diff --git a/src/optimizing-compiler-thread.cc b/src/optimizing-compiler-thread.cc
index b982b94..1e2e0a8 100644
--- a/src/optimizing-compiler-thread.cc
+++ b/src/optimizing-compiler-thread.cc
@@ -88,7 +88,9 @@
// The function may have already been optimized by OSR. Simply continue.
// Mark it for installing before queuing so that we can be sure of the write
// order: marking first and (after being queued) installing code second.
- optimizing_compiler->info()->closure()->MarkForInstallingRecompiledCode();
+ { Heap::RelocationLock relocation_lock(isolate_->heap());
+ optimizing_compiler->info()->closure()->MarkForInstallingRecompiledCode();
+ }
output_queue_.Enqueue(optimizing_compiler);
}
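
The intent of the narrowed scope above: the install mark must be visible
before the job is published on the output queue, and flipping the mark must
not race a relocating GC. The same shape with standard primitives (names are
stand-ins; queue-side synchronization elided):

    #include <mutex>
    #include <queue>

    struct Job { bool marked_for_install; };

    std::mutex relocation_mutex;    // Stands in for Heap::RelocationLock.
    std::queue<Job*> output_queue;

    void QueueForInstall(Job* job) {
      {
        std::lock_guard<std::mutex> lock(relocation_mutex);
        job->marked_for_install = true;  // Mark first, under the lock...
      }
      output_queue.push(job);            // ...and only then publish the job.
    }
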
diff --git a/src/parser.cc b/src/parser.cc
index 23fa9fe..267b872 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -2571,7 +2571,7 @@
stmt = ParseStatement(labels, CHECK_OK);
with_scope->set_end_position(scanner().location().end_pos);
}
- return factory()->NewWithStatement(expr, stmt);
+ return factory()->NewWithStatement(with_scope, expr, stmt);
}
diff --git a/src/profile-generator-inl.h b/src/profile-generator-inl.h
index 4e6302c..64dabf6 100644
--- a/src/profile-generator-inl.h
+++ b/src/profile-generator-inl.h
@@ -85,7 +85,6 @@
return gc_entry_;
case JS:
case COMPILER:
- case PARALLEL_COMPILER:
// DOM events handlers are reported as OTHER / EXTERNAL entries.
// To avoid confusing people, let's put all these entries into
// one bucket.
diff --git a/src/profile-generator.cc b/src/profile-generator.cc
index 837faa0..b1b163b 100644
--- a/src/profile-generator.cc
+++ b/src/profile-generator.cc
@@ -571,7 +571,12 @@
void CodeMap::CodeTreePrinter::Call(
const Address& key, const CodeMap::CodeEntryInfo& value) {
- OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
+ // For shared function entries, the 'size' field is used to store their IDs.
+ if (value.entry == kSharedFunctionCodeEntry) {
+ OS::Print("%p SharedFunctionInfo %d\n", key, value.size);
+ } else {
+ OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
+ }
}
diff --git a/src/profile-generator.h b/src/profile-generator.h
index 4ddb753..761291e 100644
--- a/src/profile-generator.h
+++ b/src/profile-generator.h
@@ -107,7 +107,6 @@
INLINE(const char* name() const) { return name_; }
INLINE(const char* resource_name() const) { return resource_name_; }
INLINE(int line_number() const) { return line_number_; }
- INLINE(int shared_id() const) { return shared_id_; }
INLINE(void set_shared_id(int shared_id)) { shared_id_ = shared_id; }
INLINE(int security_token_id() const) { return security_token_id_; }
@@ -401,33 +400,6 @@
public:
explicit ProfileGenerator(CpuProfilesCollection* profiles);
- INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
- Name* name,
- String* resource_name,
- int line_number)) {
- return profiles_->NewCodeEntry(tag, name, resource_name, line_number);
- }
-
- INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
- const char* name)) {
- return profiles_->NewCodeEntry(tag, name);
- }
-
- INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
- const char* name_prefix,
- Name* name)) {
- return profiles_->NewCodeEntry(tag, name_prefix, name);
- }
-
- INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
- int args_count)) {
- return profiles_->NewCodeEntry(tag, args_count);
- }
-
- INLINE(CodeEntry* NewCodeEntry(int security_token_id)) {
- return profiles_->NewCodeEntry(security_token_id);
- }
-
void RecordTickSample(const TickSample& sample);
INLINE(CodeMap* code_map()) { return &code_map_; }
diff --git a/src/runtime.cc b/src/runtime.cc
index cd1af05..ebe88fe 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -662,6 +662,47 @@
}
+bool Runtime::SetupArrayBuffer(Isolate* isolate,
+ Handle<JSArrayBuffer> array_buffer,
+ void* data,
+ size_t allocated_length) {
+ array_buffer->set_backing_store(data);
+
+ Handle<Object> byte_length =
+ isolate->factory()->NewNumber(static_cast<double>(allocated_length));
+ CHECK(byte_length->IsSmi() || byte_length->IsHeapNumber());
+ array_buffer->set_byte_length(*byte_length);
+ return true;
+}
+
+
+bool Runtime::SetupArrayBufferAllocatingData(
+ Isolate* isolate,
+ Handle<JSArrayBuffer> array_buffer,
+ size_t allocated_length) {
+ void* data;
+ if (allocated_length != 0) {
+ data = malloc(allocated_length);
+ if (data == NULL) return false;
+ memset(data, 0, allocated_length);
+ } else {
+ data = NULL;
+ }
+
+ if (!SetupArrayBuffer(isolate, array_buffer, data, allocated_length))
+ return false;
+
+ v8::Isolate* external_isolate = reinterpret_cast<v8::Isolate*>(isolate);
+ v8::Persistent<v8::Value> weak_handle = v8::Persistent<v8::Value>::New(
+ external_isolate, v8::Utils::ToLocal(Handle<Object>::cast(array_buffer)));
+ weak_handle.MakeWeak(external_isolate, data, ArrayBufferWeakCallback);
+ weak_handle.MarkIndependent(external_isolate);
+ isolate->heap()->AdjustAmountOfExternalAllocatedMemory(allocated_length);
+
+ return true;
+}
+
+
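
SetupArrayBufferAllocatingData ties the malloc'ed backing store to the JS
wrapper's lifetime: a weak persistent handle frees the memory when the buffer
dies, and the external-memory accounting keeps the GC informed. The
allocation half of that pattern, sketched with the handle machinery reduced
to a comment:

    #include <cstddef>
    #include <cstdlib>
    #include <cstring>

    struct Buffer { void* data; std::size_t length; };

    // Stand-in for ArrayBufferWeakCallback: free the store once the wrapper
    // object has been collected.
    void OnBufferCollected(void* data) { std::free(data); }

    Buffer* AllocateBuffer(std::size_t length) {
      void* data = NULL;
      if (length != 0) {
        data = std::malloc(length);
        if (data == NULL) return NULL;  // Surfaces as a RangeError above.
        std::memset(data, 0, length);   // New buffers must read as zeroes.
      }
      Buffer* buffer = new Buffer;
      buffer->data = data;
      buffer->length = length;
      // The real code then makes a weak persistent handle (firing
      // OnBufferCollected) and calls AdjustAmountOfExternalAllocatedMemory.
      return buffer;
    }
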
RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferInitialize) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
@@ -685,38 +726,12 @@
allocated_length = static_cast<size_t>(value);
}
- void* data;
- if (allocated_length != 0) {
- data = malloc(allocated_length);
-
- if (data == NULL) {
+ if (!Runtime::SetupArrayBufferAllocatingData(isolate,
+ holder, allocated_length)) {
return isolate->Throw(*isolate->factory()->
NewRangeError("invalid_array_buffer_length",
HandleVector<Object>(NULL, 0)));
- }
-
- memset(data, 0, allocated_length);
- } else {
- data = NULL;
}
- holder->set_backing_store(data);
-
- Object* byte_length;
- {
- MaybeObject* maybe_byte_length =
- isolate->heap()->NumberFromDouble(
- static_cast<double>(allocated_length));
- if (!maybe_byte_length->ToObject(&byte_length)) return maybe_byte_length;
- }
- CHECK(byte_length->IsSmi() || byte_length->IsHeapNumber());
- holder->set_byte_length(byte_length);
-
- v8::Isolate* external_isolate = reinterpret_cast<v8::Isolate*>(isolate);
- v8::Persistent<v8::Value> weak_handle = v8::Persistent<v8::Value>::New(
- external_isolate, v8::Utils::ToLocal(Handle<Object>::cast(holder)));
- weak_handle.MakeWeak(external_isolate, data, ArrayBufferWeakCallback);
- weak_handle.MarkIndependent(external_isolate);
- isolate->heap()->AdjustAmountOfExternalAllocatedMemory(allocated_length);
return *holder;
}
@@ -2413,6 +2428,7 @@
}
generator->set_function(function);
generator->set_context(Context::cast(frame->context()));
+ generator->set_receiver(frame->receiver());
generator->set_continuation(0);
generator->set_operand_stack(isolate->heap()->empty_fixed_array());
@@ -2459,22 +2475,83 @@
ASSERT_EQ(generator_object->operand_stack(),
isolate->heap()->empty_fixed_array());
// If there are no operands on the stack, there shouldn't be a handler
- // active either. Also, the active context will be the same as the function
- // itself, so there is no need to save the context.
- ASSERT_EQ(frame->context(), generator_object->context());
+ // active either.
ASSERT(!frame->HasHandler());
} else {
- generator_object->set_context(Context::cast(frame->context()));
// TODO(wingo): Save the operand stack and/or the stack handlers.
UNIMPLEMENTED();
}
+ // It's possible for the context to be other than the initial context even if
+ // there is no stack handler active. For example, this is the case in the
+ // body of a "with" statement. Therefore we always save the context.
+ generator_object->set_context(Context::cast(frame->context()));
+
// The return value is the hole for a suspend return, and anything else for a
// resume return.
return isolate->heap()->the_hole_value();
}
+// Note that this function is the slow path for resuming generators. It is only
+// called if the suspended activation had operands on the stack, stack handlers
+// needing rewinding, or if the resume should throw an exception. The fast path
+// is handled directly in FullCodeGenerator::EmitGeneratorResume(), which is
+// inlined into GeneratorNext, GeneratorSend, and GeneratorThrow.
+// EmitGeneratorResume is called in any case, as it needs to reconstruct
+// the stack frame and make space for arguments and operands.
+RUNTIME_FUNCTION(MaybeObject*, Runtime_ResumeJSGeneratorObject) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 3);
+ CONVERT_ARG_HANDLE_CHECKED(JSGeneratorObject, generator_object, 0);
+ CONVERT_ARG_HANDLE_CHECKED(Object, value, 1);
+ CONVERT_SMI_ARG_CHECKED(resume_mode_int, 2);
+ JavaScriptFrameIterator stack_iterator(isolate);
+ JavaScriptFrame* frame = stack_iterator.frame();
+
+ ASSERT_EQ(frame->function(), generator_object->function());
+
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
+
+ Address pc = generator_object->function()->code()->instruction_start();
+ int offset = generator_object->continuation();
+ ASSERT(offset > 0);
+ frame->set_pc(pc + offset);
+ generator_object->set_continuation(JSGeneratorObject::kGeneratorExecuting);
+
+ if (generator_object->operand_stack()->length() != 0) {
+ // TODO(wingo): Copy operand stack. Rewind handlers.
+ UNIMPLEMENTED();
+ }
+
+ JSGeneratorObject::ResumeMode resume_mode =
+ static_cast<JSGeneratorObject::ResumeMode>(resume_mode_int);
+ switch (resume_mode) {
+ case JSGeneratorObject::SEND:
+ return *value;
+ case JSGeneratorObject::THROW:
+ return isolate->Throw(*value);
+ }
+
+ UNREACHABLE();
+ return isolate->ThrowIllegalOperation();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_ThrowGeneratorStateError) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSGeneratorObject, generator, 0);
+ int continuation = generator->continuation();
+ const char* message = continuation == JSGeneratorObject::kGeneratorClosed ?
+ "generator_finished" : "generator_running";
+ Vector< Handle<Object> > argv = HandleVector<Object>(NULL, 0);
+ Handle<Object> error = isolate->factory()->NewError(message, argv);
+ return isolate->Throw(*error);
+}
+
+
MUST_USE_RESULT static MaybeObject* CharFromCode(Isolate* isolate,
Object* char_code) {
if (char_code->IsNumber()) {
@@ -6212,6 +6289,16 @@
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberImul) {
+ NoHandleAllocation ha(isolate);
+ ASSERT(args.length() == 2);
+
+ CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
+ CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
+ return isolate->heap()->NumberFromInt32(x * y);
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringAdd) {
NoHandleAllocation ha(isolate);
ASSERT(args.length() == 2);
@@ -8894,7 +8981,7 @@
return false;
} else {
// Callback set. Let it decide if code generation is allowed.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
return callback(v8::Utils::ToLocal(context));
}
}
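
The new Runtime_NumberImul above implements ES6 Math.imul semantics: both operands are coerced to int32 and their product is taken modulo 2^32. Note that x * y on int32_t values is a signed multiplication whose overflow is undefined behavior in standard C++; a standalone sketch of the specified wraparound behavior (an illustration of the semantics, not the patch's code) uses an unsigned intermediate:

  #include <stdint.h>

  // Math.imul: multiply the two int32 inputs modulo 2^32 and return the
  // low 32 bits reinterpreted as a signed value. The unsigned intermediate
  // makes the wraparound well defined.
  int32_t MathImul(int32_t x, int32_t y) {
    return static_cast<int32_t>(static_cast<uint32_t>(x) *
                                static_cast<uint32_t>(y));
  }
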
diff --git a/src/runtime.h b/src/runtime.h
index 83e1641..2a102e1 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -145,6 +145,7 @@
F(NumberMod, 2, 1) \
F(NumberUnaryMinus, 1, 1) \
F(NumberAlloc, 0, 1) \
+ F(NumberImul, 2, 1) \
\
F(StringAdd, 2, 1) \
F(StringBuilderConcat, 3, 1) \
@@ -299,6 +300,8 @@
/* Harmony generators */ \
F(CreateJSGeneratorObject, 0, 1) \
F(SuspendJSGeneratorObject, 1, 1) \
+ F(ResumeJSGeneratorObject, 3, 1) \
+ F(ThrowGeneratorStateError, 1, 1) \
\
/* Harmony modules */ \
F(IsJSModule, 1, 1) \
@@ -560,7 +563,9 @@
F(IsRegExpEquivalent, 2, 1) \
F(HasCachedArrayIndex, 1, 1) \
F(GetCachedArrayIndex, 1, 1) \
- F(FastAsciiArrayJoin, 2, 1)
+ F(FastAsciiArrayJoin, 2, 1) \
+ F(GeneratorSend, 2, 1) \
+ F(GeneratorThrow, 2, 1)
// ----------------------------------------------------------------------------
@@ -744,6 +749,16 @@
Handle<Object> object,
Handle<Object> key);
+ static bool SetupArrayBuffer(Isolate* isolate,
+ Handle<JSArrayBuffer> array_buffer,
+ void* data,
+ size_t allocated_length);
+
+ static bool SetupArrayBufferAllocatingData(
+ Isolate* isolate,
+ Handle<JSArrayBuffer> array_buffer,
+ size_t allocated_length);
+
// Helper functions used stubs.
static void PerformGC(Object* result);
diff --git a/src/scopes.cc b/src/scopes.cc
index 10548f9..5ad970a 100644
--- a/src/scopes.cc
+++ b/src/scopes.cc
@@ -726,7 +726,9 @@
int n = 0;
for (Scope* s = this; s != scope; s = s->outer_scope_) {
ASSERT(s != NULL); // scope must be in the scope chain
- if (s->num_heap_slots() > 0) n++;
+ if (s->is_with_scope() || s->num_heap_slots() > 0) n++;
+ // Catch scopes always have heap slots.
+ ASSERT(!s->is_catch_scope() || s->num_heap_slots() > 0);
}
return n;
}
diff --git a/src/spaces.cc b/src/spaces.cc
index 7202e1b..df1c3ef 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -1817,6 +1817,7 @@
CASE(UNARY_OP_IC);
CASE(BINARY_OP_IC);
CASE(COMPARE_IC);
+ CASE(COMPARE_NIL_IC);
CASE(TO_BOOLEAN_IC);
}
}
diff --git a/src/store-buffer.cc b/src/store-buffer.cc
index 8a69164..7d73dd5 100644
--- a/src/store-buffer.cc
+++ b/src/store-buffer.cc
@@ -25,9 +25,11 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include "v8.h"
-
#include "store-buffer.h"
+
+#include <algorithm>
+
+#include "v8.h"
#include "store-buffer-inl.h"
#include "v8-counters.h"
@@ -122,33 +124,6 @@
}
-#if V8_TARGET_ARCH_X64
-static int CompareAddresses(const void* void_a, const void* void_b) {
- intptr_t a =
- reinterpret_cast<intptr_t>(*reinterpret_cast<const Address*>(void_a));
- intptr_t b =
- reinterpret_cast<intptr_t>(*reinterpret_cast<const Address*>(void_b));
- // Unfortunately if int is smaller than intptr_t there is no branch-free
- // way to return a number with the same sign as the difference between the
- // pointers.
- if (a == b) return 0;
- if (a < b) return -1;
- ASSERT(a > b);
- return 1;
-}
-#else
-static int CompareAddresses(const void* void_a, const void* void_b) {
- intptr_t a =
- reinterpret_cast<intptr_t>(*reinterpret_cast<const Address*>(void_a));
- intptr_t b =
- reinterpret_cast<intptr_t>(*reinterpret_cast<const Address*>(void_b));
- ASSERT(sizeof(1) == sizeof(a));
- // Shift down to avoid wraparound.
- return (a >> kPointerSizeLog2) - (b >> kPointerSizeLog2);
-}
-#endif
-
-
void StoreBuffer::Uniq() {
// Remove adjacent duplicates and cells that do not point at new space.
Address previous = NULL;
@@ -283,10 +258,7 @@
void StoreBuffer::SortUniq() {
Compact();
if (old_buffer_is_sorted_) return;
- qsort(reinterpret_cast<void*>(old_start_),
- old_top_ - old_start_,
- sizeof(*old_top_),
- &CompareAddresses);
+ std::sort(old_start_, old_top_);
Uniq();
old_buffer_is_sorted_ = true;
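
The SortUniq change above is part of the qsort-to-std::sort replacement this release makes: std::sort takes a strict-weak-ordering predicate rather than a sign-returning C comparator, and for a buffer of Address values the built-in pointer comparison suffices, so the platform-specific CompareAddresses helpers can be deleted outright. A condensed before/after sketch with illustrative types (not the store buffer itself):

  #include <algorithm>
  #include <cstdlib>

  typedef unsigned char* Address;

  // Before: qsort needs a C comparator that returns the sign of the
  // difference without overflowing.
  static int CompareAddresses(const void* a, const void* b) {
    Address x = *static_cast<const Address*>(a);
    Address y = *static_cast<const Address*>(b);
    return (x > y) - (x < y);
  }

  void SortWithQsort(Address* start, Address* top) {
    qsort(start, top - start, sizeof(*start), CompareAddresses);
  }

  // After: operator< on the element type provides the ordering, and the
  // comparison is inlined, which typically beats qsort's indirect calls.
  void SortWithStdSort(Address* start, Address* top) {
    std::sort(start, top);
  }
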
diff --git a/src/store-buffer.h b/src/store-buffer.h
index 79046d1..514534a 100644
--- a/src/store-buffer.h
+++ b/src/store-buffer.h
@@ -37,6 +37,8 @@
namespace v8 {
namespace internal {
+class Page;
+class PagedSpace;
class StoreBuffer;
typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 396e92c..f928cf6 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -110,15 +110,25 @@
Handle<Code> StubCache::FindIC(Handle<Name> name,
+ Handle<Map> stub_holder_map,
+ Code::Kind kind,
+ Code::StubType type,
+ Code::ExtraICState extra_state) {
+ Code::Flags flags = Code::ComputeMonomorphicFlags(kind, extra_state, type);
+ Handle<Object> probe(stub_holder_map->FindInCodeCache(*name, flags),
+ isolate_);
+ if (probe->IsCode()) return Handle<Code>::cast(probe);
+ return Handle<Code>::null();
+}
+
+
+Handle<Code> StubCache::FindIC(Handle<Name> name,
Handle<JSObject> stub_holder,
Code::Kind kind,
Code::StubType type,
Code::ExtraICState extra_ic_state) {
- Code::Flags flags = Code::ComputeMonomorphicFlags(kind, extra_ic_state, type);
- Handle<Object> probe(stub_holder->map()->FindInCodeCache(*name, flags),
- isolate_);
- if (probe->IsCode()) return Handle<Code>::cast(probe);
- return Handle<Code>::null();
+ return FindIC(name, Handle<Map>(stub_holder->map()), kind,
+ type, extra_ic_state);
}
@@ -487,7 +497,8 @@
Handle<JSGlobalPropertyCell> cell,
StrictModeFlag strict_mode) {
Handle<Code> stub = FindIC(
- name, receiver, Code::STORE_IC, Code::NORMAL, strict_mode);
+ name, Handle<JSObject>::cast(receiver),
+ Code::STORE_IC, Code::NORMAL, strict_mode);
if (!stub.is_null()) return stub;
StoreStubCompiler compiler(isolate_, strict_mode);
@@ -893,6 +904,32 @@
}
+Handle<Code> StubCache::ComputeCompareNil(Handle<Map> receiver_map,
+ NilValue nil,
+ CompareNilICStub::Types types) {
+ CompareNilICStub stub(kNonStrictEquality, nil, types);
+
+ Handle<String> name(isolate_->heap()->empty_string());
+ if (!receiver_map->is_shared()) {
+ Handle<Code> cached_ic = FindIC(name, receiver_map, Code::COMPARE_NIL_IC,
+ Code::NORMAL, stub.GetExtraICState());
+ if (!cached_ic.is_null()) return cached_ic;
+ }
+
+ Handle<Code> ic = stub.GetCode(isolate_);
+ // For monomorphic maps, use the code as a template, copying and replacing
+ // the monomorphic map that checks the object's type.
+ ic = isolate_->factory()->CopyCode(ic);
+ ic->ReplaceFirstMap(*receiver_map);
+
+ if (!receiver_map->is_shared()) {
+ Map::UpdateCodeCache(receiver_map, name, ic);
+ }
+
+ return ic;
+}
+
+
Handle<Code> StubCache::ComputeLoadElementPolymorphic(
MapHandleList* receiver_maps) {
Code::Flags flags = Code::ComputeFlags(Code::KEYED_LOAD_IC, POLYMORPHIC);
@@ -1074,7 +1111,7 @@
v8::AccessorInfo info(custom_args.end());
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate, setter_address);
fun(v8::Utils::ToLocal(str), v8::Utils::ToLocal(value), info);
}
@@ -1120,7 +1157,7 @@
v8::Handle<v8::Value> r;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
r = getter(v8::Utils::ToLocal(name), info);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
@@ -1183,7 +1220,7 @@
v8::Handle<v8::Value> r;
{
// Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
+ VMState<EXTERNAL> state(isolate);
r = getter(v8::Utils::ToLocal(name), info);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
diff --git a/src/stub-cache.h b/src/stub-cache.h
index 02bb541..dbb5e90 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -30,6 +30,7 @@
#include "allocation.h"
#include "arguments.h"
+#include "code-stubs.h"
#include "ic-inl.h"
#include "macro-assembler.h"
#include "objects.h"
@@ -78,6 +79,12 @@
Handle<JSObject> holder);
Handle<Code> FindIC(Handle<Name> name,
+ Handle<Map> stub_holder_map,
+ Code::Kind kind,
+ Code::StubType type,
+ Code::ExtraICState extra_state = Code::kNoExtraICState);
+
+ Handle<Code> FindIC(Handle<Name> name,
Handle<JSObject> stub_holder,
Code::Kind kind,
Code::StubType type,
@@ -271,6 +278,12 @@
// ---
+ Handle<Code> ComputeCompareNil(Handle<Map> receiver_map,
+ NilValue nil,
+ CompareNilICStub::Types types);
+
+ // ---
+
Handle<Code> ComputeLoadElementPolymorphic(MapHandleList* receiver_maps);
Handle<Code> ComputeStoreElementPolymorphic(MapHandleList* receiver_maps,
KeyedAccessStoreMode store_mode,
diff --git a/src/type-info.cc b/src/type-info.cc
index 39a01f5..3bc509a 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -218,6 +218,17 @@
}
+Handle<Map> TypeFeedbackOracle::CompareNilMonomorphicReceiverType(
+ TypeFeedbackId id) {
+ Handle<Object> maybe_code = GetInfo(id);
+ if (maybe_code->IsCode()) {
+ Map* first_map = Handle<Code>::cast(maybe_code)->FindFirstMap();
+ if (first_map != NULL) return Handle<Map>(first_map);
+ }
+ return Handle<Map>();
+}
+
+
KeyedAccessStoreMode TypeFeedbackOracle::GetStoreMode(
TypeFeedbackId ast_id) {
Handle<Object> map_or_code = GetInfo(ast_id);
@@ -625,12 +636,23 @@
}
-byte TypeFeedbackOracle::ToBooleanTypes(TypeFeedbackId ast_id) {
- Handle<Object> object = GetInfo(ast_id);
+byte TypeFeedbackOracle::ToBooleanTypes(TypeFeedbackId id) {
+ Handle<Object> object = GetInfo(id);
return object->IsCode() ? Handle<Code>::cast(object)->to_boolean_state() : 0;
}
+byte TypeFeedbackOracle::CompareNilTypes(TypeFeedbackId id) {
+ Handle<Object> object = GetInfo(id);
+ if (object->IsCode() &&
+ Handle<Code>::cast(object)->is_compare_nil_ic_stub()) {
+ return Handle<Code>::cast(object)->compare_nil_state();
+ } else {
+ return CompareNilICStub::kFullCompare;
+ }
+}
+
+
// Things are a bit tricky here: The iterator for the RelocInfos and the infos
// themselves are not GC-safe, so we first get all infos, then we create the
// dictionary (possibly triggering GC), and finally we relocate the collected
@@ -724,6 +746,7 @@
case Code::BINARY_OP_IC:
case Code::COMPARE_IC:
case Code::TO_BOOLEAN_IC:
+ case Code::COMPARE_NIL_IC:
SetInfo(ast_id, target);
break;
diff --git a/src/type-info.h b/src/type-info.h
index 583c3fc..d6d958d 100644
--- a/src/type-info.h
+++ b/src/type-info.h
@@ -253,7 +253,8 @@
bool IsForInFastCase(ForInStatement* expr);
Handle<Map> LoadMonomorphicReceiverType(Property* expr);
- Handle<Map> StoreMonomorphicReceiverType(TypeFeedbackId ast_id);
+ Handle<Map> StoreMonomorphicReceiverType(TypeFeedbackId id);
+ Handle<Map> CompareNilMonomorphicReceiverType(TypeFeedbackId id);
KeyedAccessStoreMode GetStoreMode(TypeFeedbackId ast_id);
@@ -293,6 +294,11 @@
// headers!! :-P
byte ToBooleanTypes(TypeFeedbackId ast_id);
+ // TODO(1571) We can't use CompareNilICStub::Types as the return value because
+ // of various cycles in our headers. Death to tons of implementations in
+ // headers!! :-P
+ byte CompareNilTypes(TypeFeedbackId ast_id);
+
// Get type information for arithmetic operations and compares.
TypeInfo UnaryType(UnaryOperation* expr);
void BinaryType(BinaryOperation* expr,
diff --git a/src/utils.h b/src/utils.h
index b84d592..b2c2ff1 100644
--- a/src/utils.h
+++ b/src/utils.h
@@ -30,6 +30,7 @@
#include <stdlib.h>
#include <string.h>
+#include <algorithm>
#include <climits>
#include "allocation.h"
@@ -410,15 +411,11 @@
}
void Sort(int (*cmp)(const T*, const T*)) {
- typedef int (*RawComparer)(const void*, const void*);
- qsort(start(),
- length(),
- sizeof(T),
- reinterpret_cast<RawComparer>(cmp));
+ std::sort(start(), start() + length(), RawComparer(cmp));
}
void Sort() {
- Sort(PointerValueCompare<T>);
+ std::sort(start(), start() + length());
}
void Truncate(int length) {
@@ -454,6 +451,17 @@
private:
T* start_;
int length_;
+
+ class RawComparer {
+ public:
+ explicit RawComparer(int (*cmp)(const T*, const T*)) : cmp_(cmp) {}
+ bool operator()(const T& a, const T& b) {
+ return cmp_(&a, &b) < 0;
+ }
+
+ private:
+ int (*cmp_)(const T*, const T*);
+ };
};
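
The RawComparer nested class above is the adapter that keeps the existing Sort(cmp) interface working: callers still pass a qsort-style three-way comparator over element pointers, and the adapter turns it into the bool-returning less-than predicate std::sort expects. A self-contained sketch of the same pattern, with hypothetical names:

  #include <algorithm>

  // Wraps a three-way comparator (negative/zero/positive result) in a
  // strict-weak-ordering predicate usable with std::sort.
  template <typename T>
  class LessFromThreeWay {
   public:
    explicit LessFromThreeWay(int (*cmp)(const T*, const T*)) : cmp_(cmp) {}
    bool operator()(const T& a, const T& b) const { return cmp_(&a, &b) < 0; }
   private:
    int (*cmp_)(const T*, const T*);
  };

  // Overflow-safe descending comparator for ints.
  static int Descending(const int* a, const int* b) {
    return (*b > *a) - (*b < *a);
  }

  void SortDescending(int* data, int length) {
    std::sort(data, data + length, LessFromThreeWay<int>(Descending));
  }
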
diff --git a/src/v8-counters.cc b/src/v8-counters.cc
index 4107dd3..ca83e38 100644
--- a/src/v8-counters.cc
+++ b/src/v8-counters.cc
@@ -32,58 +32,48 @@
namespace v8 {
namespace internal {
-Counters::Counters() {
+Counters::Counters(Isolate* isolate) {
#define HT(name, caption) \
- HistogramTimer name = { {#caption, 0, 10000, 50, NULL, false}, 0, 0 }; \
- name##_ = name;
+ name##_ = HistogramTimer(#caption, 0, 10000, 50, isolate);
HISTOGRAM_TIMER_LIST(HT)
#undef HT
#define HP(name, caption) \
- Histogram name = { #caption, 0, 101, 100, NULL, false }; \
- name##_ = name;
+ name##_ = Histogram(#caption, 0, 101, 100, isolate);
HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP
#define HM(name, caption) \
- Histogram name = { #caption, 1000, 500000, 50, NULL, false }; \
- name##_ = name;
+ name##_ = Histogram(#caption, 1000, 500000, 50, isolate);
HISTOGRAM_MEMORY_LIST(HM)
#undef HM
#define SC(name, caption) \
- StatsCounter name = { "c:" #caption, NULL, false };\
- name##_ = name;
+ name##_ = StatsCounter("c:" #caption);
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
#undef SC
#define SC(name) \
- StatsCounter count_of_##name = { "c:" "V8.CountOf_" #name, NULL, false };\
- count_of_##name##_ = count_of_##name; \
- StatsCounter size_of_##name = { "c:" "V8.SizeOf_" #name, NULL, false };\
- size_of_##name##_ = size_of_##name;
+ count_of_##name##_ = StatsCounter("c:" "V8.CountOf_" #name); \
+ size_of_##name##_ = StatsCounter("c:" "V8.SizeOf_" #name);
INSTANCE_TYPE_LIST(SC)
#undef SC
#define SC(name) \
- StatsCounter count_of_CODE_TYPE_##name = { \
- "c:" "V8.CountOf_CODE_TYPE-" #name, NULL, false }; \
- count_of_CODE_TYPE_##name##_ = count_of_CODE_TYPE_##name; \
- StatsCounter size_of_CODE_TYPE_##name = { \
- "c:" "V8.SizeOf_CODE_TYPE-" #name, NULL, false }; \
- size_of_CODE_TYPE_##name##_ = size_of_CODE_TYPE_##name;
+ count_of_CODE_TYPE_##name##_ = \
+ StatsCounter("c:" "V8.CountOf_CODE_TYPE-" #name); \
+ size_of_CODE_TYPE_##name##_ = \
+ StatsCounter("c:" "V8.SizeOf_CODE_TYPE-" #name);
CODE_KIND_LIST(SC)
#undef SC
#define SC(name) \
- StatsCounter count_of_FIXED_ARRAY_##name = { \
- "c:" "V8.CountOf_FIXED_ARRAY-" #name, NULL, false }; \
- count_of_FIXED_ARRAY_##name##_ = count_of_FIXED_ARRAY_##name; \
- StatsCounter size_of_FIXED_ARRAY_##name = { \
- "c:" "V8.SizeOf_FIXED_ARRAY-" #name, NULL, false }; \
- size_of_FIXED_ARRAY_##name##_ = size_of_FIXED_ARRAY_##name;
+ count_of_FIXED_ARRAY_##name##_ = \
+ StatsCounter("c:" "V8.CountOf_FIXED_ARRAY-" #name); \
+ size_of_FIXED_ARRAY_##name##_ = \
+ StatsCounter("c:" "V8.SizeOf_FIXED_ARRAY-" #name); \
FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
#undef SC
}
diff --git a/src/v8-counters.h b/src/v8-counters.h
index 374ebbc..c810cba 100644
--- a/src/v8-counters.h
+++ b/src/v8-counters.h
@@ -420,6 +420,8 @@
friend class Isolate;
+ explicit Counters(Isolate* isolate);
+
DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
};
diff --git a/src/v8globals.h b/src/v8globals.h
index 82e30f5..1706893 100644
--- a/src/v8globals.h
+++ b/src/v8globals.h
@@ -162,7 +162,6 @@
class RelocInfo;
class Deserializer;
class MessageLocation;
-class ObjectGroup;
class VirtualMemory;
class Mutex;
@@ -363,7 +362,6 @@
JS,
GC,
COMPILER,
- PARALLEL_COMPILER,
OTHER,
EXTERNAL
};
diff --git a/src/version.cc b/src/version.cc
index 2380d90..dac2bf0 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,8 +34,8 @@
// system so their names cannot be changed without changing the scripts.
#define MAJOR_VERSION 3
#define MINOR_VERSION 18
-#define BUILD_NUMBER 3
-#define PATCH_LEVEL 2
+#define BUILD_NUMBER 4
+#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
diff --git a/src/vm-state-inl.h b/src/vm-state-inl.h
index fae68eb..862c17e 100644
--- a/src/vm-state-inl.h
+++ b/src/vm-state-inl.h
@@ -47,8 +47,6 @@
return "GC";
case COMPILER:
return "COMPILER";
- case PARALLEL_COMPILER:
- return "PARALLEL_COMPILER";
case OTHER:
return "OTHER";
case EXTERNAL:
@@ -60,36 +58,24 @@
}
-VMState::VMState(Isolate* isolate, StateTag tag)
+template <StateTag Tag>
+VMState<Tag>::VMState(Isolate* isolate)
: isolate_(isolate), previous_tag_(isolate->current_vm_state()) {
- if (FLAG_log_state_changes) {
- LOG(isolate, UncheckedStringEvent("Entering", StateToString(tag)));
- LOG(isolate, UncheckedStringEvent("From", StateToString(previous_tag_)));
+ if (FLAG_log_timer_events && previous_tag_ != EXTERNAL && Tag == EXTERNAL) {
+ LOG(isolate_,
+ TimerEvent(Logger::START, Logger::TimerEventScope::v8_external));
}
-
- if (FLAG_log_timer_events && previous_tag_ != EXTERNAL && tag == EXTERNAL) {
- LOG(isolate_, EnterExternal());
- }
-
- isolate_->SetCurrentVMState(tag);
+ isolate_->set_current_vm_state(Tag);
}
-VMState::~VMState() {
- if (FLAG_log_state_changes) {
+template <StateTag Tag>
+VMState<Tag>::~VMState() {
+ if (FLAG_log_timer_events && previous_tag_ != EXTERNAL && Tag == EXTERNAL) {
LOG(isolate_,
- UncheckedStringEvent("Leaving",
- StateToString(isolate_->current_vm_state())));
- LOG(isolate_,
- UncheckedStringEvent("To", StateToString(previous_tag_)));
+ TimerEvent(Logger::END, Logger::TimerEventScope::v8_external));
}
-
- if (FLAG_log_timer_events &&
- previous_tag_ != EXTERNAL && isolate_->current_vm_state() == EXTERNAL) {
- LOG(isolate_, LeaveExternal());
- }
-
- isolate_->SetCurrentVMState(previous_tag_);
+ isolate_->set_current_vm_state(previous_tag_);
}
diff --git a/src/vm-state.h b/src/vm-state.h
index 831e2d3..765b570 100644
--- a/src/vm-state.h
+++ b/src/vm-state.h
@@ -34,9 +34,10 @@
namespace v8 {
namespace internal {
+template <StateTag Tag>
class VMState BASE_EMBEDDED {
public:
- inline VMState(Isolate* isolate, StateTag tag);
+ explicit inline VMState(Isolate* isolate);
inline ~VMState();
private:
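
Making the tag a template parameter means every VMState scope names its state at compile time, so the EXTERNAL-only timer-event logging in the constructor and destructor above reduces to a constant-false check for every other tag. A minimal usage sketch, assuming an initialized isolate (it mirrors the VMState<EXTERNAL> call sites changed throughout this patch):

  void InvokeEmbedderCallback(i::Isolate* isolate, void (*callback)()) {
    // Entering the scope saves the previous tag and switches to EXTERNAL;
    // the destructor restores the previous tag when the scope ends.
    i::VMState<i::EXTERNAL> state(isolate);
    callback();
  }
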
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 2445e23..5bb1292 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -200,6 +200,19 @@
const Register r15 = { kRegister_r15_Code };
const Register no_reg = { kRegister_no_reg_Code };
+#ifdef _WIN64
+ // Windows calling convention
+ const Register arg_reg_1 = rcx;
+ const Register arg_reg_2 = rdx;
+ const Register arg_reg_3 = r8;
+ const Register arg_reg_4 = r9;
+#else
+ // AMD64 calling convention
+ const Register arg_reg_1 = rdi;
+ const Register arg_reg_2 = rsi;
+ const Register arg_reg_3 = rdx;
+ const Register arg_reg_4 = rcx;
+#endif // _WIN64
struct XMMRegister {
static const int kMaxNumRegisters = 16;
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index ba7647b..e7daa7f 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -648,11 +648,7 @@
// the stub returns.
__ subq(Operand(rsp, 0), Immediate(5));
__ Pushad();
-#ifdef _WIN64
- __ movq(rcx, Operand(rsp, kNumSafepointRegisters * kPointerSize));
-#else
- __ movq(rdi, Operand(rsp, kNumSafepointRegisters * kPointerSize));
-#endif
+ __ movq(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
{ // NOLINT
FrameScope scope(masm, StackFrame::MANUAL);
__ PrepareCallCFunction(1);
@@ -1287,8 +1283,7 @@
// entering the generic code. In both cases argc in rax needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// a construct call and a normal call.
-static void ArrayNativeCode(MacroAssembler* masm,
- Label* call_generic_code) {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code) {
Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array,
has_non_smi_element, finish, cant_transition_map, not_double;
@@ -1522,7 +1517,7 @@
}
-void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
+void Builtins::Generate_CommonArrayConstructCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : argc
// -- rdi : constructor
@@ -1541,53 +1536,21 @@
__ Check(not_smi, "Unexpected initial map for Array function");
__ CmpObjectType(rcx, MAP_TYPE, rcx);
__ Check(equal, "Unexpected initial map for Array function");
-
- if (FLAG_optimize_constructed_arrays) {
- // We should either have undefined in ebx or a valid jsglobalpropertycell
- Label okay_here;
- Handle<Object> undefined_sentinel(
- masm->isolate()->factory()->undefined_value());
- Handle<Map> global_property_cell_map(
- masm->isolate()->heap()->global_property_cell_map());
- __ Cmp(rbx, undefined_sentinel);
- __ j(equal, &okay_here);
- __ Cmp(FieldOperand(rbx, 0), global_property_cell_map);
- __ Assert(equal, "Expected property cell in register rbx");
- __ bind(&okay_here);
- }
}
- if (FLAG_optimize_constructed_arrays) {
- Label not_zero_case, not_one_case;
- __ testq(rax, rax);
- __ j(not_zero, ¬_zero_case);
- ArrayNoArgumentConstructorStub no_argument_stub;
- __ TailCallStub(&no_argument_stub);
-
- __ bind(¬_zero_case);
- __ cmpq(rax, Immediate(1));
- __ j(greater, ¬_one_case);
- ArraySingleArgumentConstructorStub single_argument_stub;
- __ TailCallStub(&single_argument_stub);
-
- __ bind(¬_one_case);
- ArrayNArgumentsConstructorStub n_argument_stub;
- __ TailCallStub(&n_argument_stub);
- } else {
- Label generic_constructor;
- // Run the native code for the Array function called as constructor.
- ArrayNativeCode(masm, &generic_constructor);
-
- // Jump to the generic construct code in case the specialized code cannot
- // handle the construction.
- __ bind(&generic_constructor);
- Handle<Code> generic_construct_stub =
- masm->isolate()->builtins()->JSConstructStubGeneric();
- __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
- }
+ Label generic_constructor;
+ // Run the native code for the Array function called as constructor.
+ ArrayNativeCode(masm, &generic_constructor);
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
+
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : number of arguments
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index d40a5bf..3a9a023 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -96,16 +96,20 @@
}
-static void InitializeArrayConstructorDescriptor(Isolate* isolate,
- CodeStubInterfaceDescriptor* descriptor) {
+static void InitializeArrayConstructorDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor,
+ int constant_stack_parameter_count) {
// register state
- // rdi -- constructor function
+ // rax -- number of arguments
// rbx -- type info cell with elements kind
- // rax -- number of arguments to the constructor function
- static Register registers[] = { rdi, rbx };
- descriptor->register_param_count_ = 2;
- // stack param count needs (constructor pointer, and single argument)
- descriptor->stack_parameter_count_ = &rax;
+ static Register registers[] = { rbx };
+ descriptor->register_param_count_ = 1;
+ if (constant_stack_parameter_count != 0) {
+ // stack param count needs (constructor pointer, and single argument)
+ descriptor->stack_parameter_count_ = &rax;
+ }
+ descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
descriptor->register_params_ = registers;
descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
descriptor->deoptimization_handler_ =
@@ -116,21 +120,34 @@
void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}
void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}
void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
+}
+
+
+void CompareNilICStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { rax };
+ descriptor->register_param_count_ = 1;
+ descriptor->register_params_ = registers;
+ descriptor->deoptimization_handler_ =
+ FUNCTION_ADDR(CompareNilIC_Miss);
+ descriptor->miss_handler_ =
+ ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate);
}
@@ -525,11 +542,8 @@
__ PushCallerSaved(save_doubles_);
const int argument_count = 1;
__ PrepareCallCFunction(argument_count);
-#ifdef _WIN64
- __ LoadAddress(rcx, ExternalReference::isolate_address(masm->isolate()));
-#else
- __ LoadAddress(rdi, ExternalReference::isolate_address(masm->isolate()));
-#endif
+ __ LoadAddress(arg_reg_1,
+ ExternalReference::isolate_address(masm->isolate()));
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(
@@ -1999,12 +2013,7 @@
void MathPowStub::Generate(MacroAssembler* masm) {
- // Choose register conforming to calling convention (when bailing out).
-#ifdef _WIN64
const Register exponent = rdx;
-#else
- const Register exponent = rdi;
-#endif
const Register base = rax;
const Register scratch = rcx;
const XMMRegister double_result = xmm3;
@@ -3013,20 +3022,6 @@
__ movq(Operand(rsp, (argument_slots_on_stack - 5) * kPointerSize), r8);
#endif
- // First four arguments are passed in registers on both Linux and Windows.
-#ifdef _WIN64
- Register arg4 = r9;
- Register arg3 = r8;
- Register arg2 = rdx;
- Register arg1 = rcx;
-#else
- Register arg4 = rcx;
- Register arg3 = rdx;
- Register arg2 = rsi;
- Register arg1 = rdi;
-#endif
-
- // Keep track on aliasing between argX defined above and the registers used.
// rdi: subject string
// rbx: previous index
// rcx: encoding of subject string (1 if ASCII 0 if two_byte);
@@ -3035,7 +3030,7 @@
// r15: original subject string
// Argument 2: Previous index.
- __ movq(arg2, rbx);
+ __ movq(arg_reg_2, rbx);
// Argument 4: End of string data
// Argument 3: Start of string data
@@ -3043,20 +3038,24 @@
// Prepare start and end index of the input.
// Load the length from the original sliced string if that is the case.
__ addq(rbx, r14);
- __ SmiToInteger32(arg3, FieldOperand(r15, String::kLengthOffset));
- __ addq(r14, arg3); // Using arg3 as scratch.
+ __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
+ __ addq(r14, arg_reg_3); // Using arg_reg_3 as scratch.
// rbx: start index of the input
// r14: end index of the input
// r15: original subject string
__ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
__ j(zero, &setup_two_byte, Label::kNear);
- __ lea(arg4, FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
- __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
+ __ lea(arg_reg_4,
+ FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
+ __ lea(arg_reg_3,
+ FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
__ jmp(&setup_rest, Label::kNear);
__ bind(&setup_two_byte);
- __ lea(arg4, FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
- __ lea(arg3, FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
+ __ lea(arg_reg_4,
+ FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
+ __ lea(arg_reg_3,
+ FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
__ bind(&setup_rest);
// Argument 1: Original subject string.
@@ -3064,7 +3063,7 @@
// use rbp, which points exactly to one pointer size below the previous rsp.
// (Because creating a new stack frame pushes the previous rbp onto the stack
// and thereby moves up rsp by one kPointerSize.)
- __ movq(arg1, r15);
+ __ movq(arg_reg_1, r15);
// Locate the code entry and call it.
__ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
@@ -3820,7 +3819,7 @@
TypeFeedbackCells::MonomorphicArraySentinel(isolate,
LAST_FAST_ELEMENTS_KIND);
__ Cmp(rcx, terminal_kind_sentinel);
- __ j(not_equal, &miss);
+ __ j(above, &miss);
// Make sure the function is the Array() function
__ LoadArrayFunction(rcx);
__ cmpq(rdi, rcx);
@@ -4032,6 +4031,9 @@
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
// It is important that the store buffer overflow stubs are generated first.
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
+ if (FLAG_optimize_constructed_arrays) {
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+ }
}
@@ -4089,11 +4091,7 @@
// PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
// stack is known to be aligned. This function takes one argument which is
// passed in register.
-#ifdef _WIN64
- __ movq(rcx, rax);
-#else // _WIN64
- __ movq(rdi, rax);
-#endif
+ __ movq(arg_reg_1, rax);
__ movq(kScratchRegister,
ExternalReference::perform_gc_function(masm->isolate()));
__ call(kScratchRegister);
@@ -4915,16 +4913,16 @@
__ ret(2 * kPointerSize);
__ bind(&non_ascii);
// At least one of the strings is two-byte. Check whether it happens
- // to contain only ASCII characters.
+ // to contain only one-byte characters.
// rcx: first instance type AND second instance type.
// r8: first instance type.
// r9: second instance type.
- __ testb(rcx, Immediate(kAsciiDataHintMask));
+ __ testb(rcx, Immediate(kOneByteDataHintMask));
__ j(not_zero, &ascii_data);
__ xor_(r8, r9);
- STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
- __ andb(r8, Immediate(kOneByteStringTag | kAsciiDataHintTag));
- __ cmpb(r8, Immediate(kOneByteStringTag | kAsciiDataHintTag));
+ STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
+ __ andb(r8, Immediate(kOneByteStringTag | kOneByteDataHintTag));
+ __ cmpb(r8, Immediate(kOneByteStringTag | kOneByteDataHintTag));
__ j(equal, &ascii_data);
// Allocate a two byte cons string.
__ AllocateTwoByteConsString(rcx, rdi, no_reg, &call_runtime);
@@ -6481,24 +6479,16 @@
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
-#ifdef _WIN64
- Register arg3 = r8;
- Register arg2 = rdx;
- Register arg1 = rcx;
-#else
- Register arg3 = rdx;
- Register arg2 = rsi;
- Register arg1 = rdi;
-#endif
Register address =
- arg1.is(regs_.address()) ? kScratchRegister : regs_.address();
+ arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
ASSERT(!address.is(regs_.object()));
- ASSERT(!address.is(arg1));
+ ASSERT(!address.is(arg_reg_1));
__ Move(address, regs_.address());
- __ Move(arg1, regs_.object());
+ __ Move(arg_reg_1, regs_.object());
// TODO(gc) Can we just set address arg2 in the beginning?
- __ Move(arg2, address);
- __ LoadAddress(arg3, ExternalReference::isolate_address(masm->isolate()));
+ __ Move(arg_reg_2, address);
+ __ LoadAddress(arg_reg_3,
+ ExternalReference::isolate_address(masm->isolate()));
int argument_count = 3;
AllowExternalCallThatCantCauseGC scope(masm);
@@ -6769,6 +6759,198 @@
__ Ret();
}
+
+template<class T>
+static void CreateArrayDispatch(MacroAssembler* masm) {
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ cmpl(rdx, Immediate(kind));
+ __ j(not_equal, &next);
+ T stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
+ // rbx - type info cell
+ // rdx - kind
+ // rax - number of arguments
+ // rdi - constructor?
+ // rsp[0] - return address
+ // rsp[8] - last argument
+ ASSERT(FAST_SMI_ELEMENTS == 0);
+ ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ ASSERT(FAST_ELEMENTS == 2);
+ ASSERT(FAST_HOLEY_ELEMENTS == 3);
+ ASSERT(FAST_DOUBLE_ELEMENTS == 4);
+ ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
+
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ // Is the low bit set? If so, we are holey and that is good.
+ __ testb(rdx, Immediate(1));
+ Label normal_sequence;
+ __ j(not_zero, &normal_sequence);
+
+ // Look at the first argument.
+ __ movq(rcx, Operand(rsp, kPointerSize));
+ __ testq(rcx, rcx);
+ __ j(zero, &normal_sequence);
+
+ // We are going to create a holey array, but our kind is non-holey.
+ // Fix kind and retry.
+ __ incl(rdx);
+ __ Cmp(rbx, undefined_sentinel);
+ __ j(equal, &normal_sequence);
+
+ // Save the resulting elements kind in type info
+ __ Integer32ToSmi(rdx, rdx);
+ __ movq(FieldOperand(rbx, kPointerSize), rdx);
+ __ SmiToInteger32(rdx, rdx);
+
+ __ bind(&normal_sequence);
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ cmpl(rdx, Immediate(kind));
+ __ j(not_equal, &next);
+ ArraySingleArgumentConstructorStub stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+template<class T>
+static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
+ int to_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= to_index; ++i) {
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ T stub(kind);
+ stub.GetCode(isolate)->set_is_pregenerated(true);
+ }
+}
+
+
+void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
+ isolate);
+}
+
+
+void ArrayConstructorStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- rax : argc
+ // -- rbx : type info cell
+ // -- rdi : constructor
+ // -- rsp[0] : return address
+ // -- rsp[8] : last argument
+ // -----------------------------------
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ if (FLAG_debug_code) {
+ // The array construct code is only set for the global and natives
+ // builtin Array functions which always have maps.
+
+ // Initial map for the builtin Array function should be a map.
+ __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
+ // Will both indicate a NULL and a Smi.
+ STATIC_ASSERT(kSmiTag == 0);
+ Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
+ __ Check(not_smi, "Unexpected initial map for Array function");
+ __ CmpObjectType(rcx, MAP_TYPE, rcx);
+ __ Check(equal, "Unexpected initial map for Array function");
+
+ // We should either have undefined in rbx or a valid JSGlobalPropertyCell.
+ Label okay_here;
+ Handle<Map> global_property_cell_map(
+ masm->isolate()->heap()->global_property_cell_map());
+ __ Cmp(rbx, undefined_sentinel);
+ __ j(equal, &okay_here);
+ __ Cmp(FieldOperand(rbx, 0), global_property_cell_map);
+ __ Assert(equal, "Expected property cell in register rbx");
+ __ bind(&okay_here);
+ }
+
+ if (FLAG_optimize_constructed_arrays) {
+ Label no_info, switch_ready;
+ // Get the elements kind and case on that.
+ __ Cmp(rbx, undefined_sentinel);
+ __ j(equal, &no_info);
+ __ movq(rdx, FieldOperand(rbx, kPointerSize));
+
+ // There is no info if the call site went megamorphic either
+
+ // TODO(mvstanton): Really? I thought if it was the array function that
+ // the cell wouldn't get stamped as megamorphic.
+ __ Cmp(rdx, TypeFeedbackCells::MegamorphicSentinel(masm->isolate()));
+ __ j(equal, &no_info);
+ __ SmiToInteger32(rdx, rdx);
+ __ jmp(&switch_ready);
+ __ bind(&no_info);
+ __ movq(rdx, Immediate(GetInitialFastElementsKind()));
+ __ bind(&switch_ready);
+
+ if (argument_count_ == ANY) {
+ Label not_zero_case, not_one_case;
+ __ testq(rax, rax);
+ __ j(not_zero, ¬_zero_case);
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+
+ __ bind(¬_zero_case);
+ __ cmpl(rax, Immediate(1));
+ __ j(greater, ¬_one_case);
+ CreateArrayDispatchOneArgument(masm);
+
+ __ bind(¬_one_case);
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else if (argument_count_ == NONE) {
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+ } else if (argument_count_ == ONE) {
+ CreateArrayDispatchOneArgument(masm);
+ } else if (argument_count_ == MORE_THAN_ONE) {
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else {
+ UNREACHABLE();
+ }
+ } else {
+ Label generic_constructor;
+ // Run the native code for the Array function called as constructor.
+ ArrayNativeCode(masm, &generic_constructor);
+
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
+ }
+}
+
+
#undef __
} } // namespace v8::internal
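
CreateArrayDispatch<T> and CreateArrayDispatchOneArgument above emit a compare-and-tail-call chain with one specialized stub per fast ElementsKind; the one-argument variant additionally exploits the enum layout in which each holey kind is its packed kind plus one, so the incl(rdx) after the low-bit test upgrades a packed kind to its holey counterpart. A rough C++ model of the emitted dispatch shape, with a stand-in stub class rather than the real V8 stub API:

  // Illustrative only: models the generated compare chain as ordinary C++.
  // The kind values mirror the ASSERTs in CreateArrayDispatchOneArgument.
  enum ElementsKind {
    FAST_SMI_ELEMENTS = 0,
    FAST_HOLEY_SMI_ELEMENTS = 1,
    FAST_ELEMENTS = 2,
    FAST_HOLEY_ELEMENTS = 3,
    FAST_DOUBLE_ELEMENTS = 4,
    FAST_HOLEY_DOUBLE_ELEMENTS = 5  // the terminal fast kind
  };

  template <class Stub>
  void DispatchOnKind(int kind) {
    for (int i = 0; i <= FAST_HOLEY_DOUBLE_ELEMENTS; ++i) {
      if (kind == i) {
        Stub stub(static_cast<ElementsKind>(i));
        stub.Run();  // stands in for __ TailCallStub(&stub) above
        return;
      }
    }
    // An unexpected kind corresponds to the __ Abort() ending the chain.
  }
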
diff --git a/src/x64/code-stubs-x64.h b/src/x64/code-stubs-x64.h
index 72a3a95..eafb960 100644
--- a/src/x64/code-stubs-x64.h
+++ b/src/x64/code-stubs-x64.h
@@ -35,6 +35,8 @@
namespace internal {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
+
// Compute a transcendental math function natively, or call the
// TranscendentalCache runtime function.
class TranscendentalCacheStub: public PlatformCodeStub {
diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc
index b8ce99b..bec158b 100644
--- a/src/x64/deoptimizer-x64.cc
+++ b/src/x64/deoptimizer-x64.cc
@@ -609,37 +609,22 @@
const int kSavedRegistersAreaSize = kNumberOfRegisters * kPointerSize +
kDoubleRegsSize;
- // When calling new_deoptimizer_function we need to pass the last argument
- // on the stack on windows and in r8 on linux. The remaining arguments are
- // all passed in registers (different ones on linux and windows though).
-
-#ifdef _WIN64
- Register arg4 = r9;
- Register arg3 = r8;
- Register arg2 = rdx;
- Register arg1 = rcx;
-#else
- Register arg4 = rcx;
- Register arg3 = rdx;
- Register arg2 = rsi;
- Register arg1 = rdi;
-#endif
-
// We use this to keep the value of the fifth argument temporarily.
// Unfortunately we can't store it directly in r8 (used for passing
// this on linux), since it is another parameter passing register on windows.
Register arg5 = r11;
// Get the bailout id from the stack.
- __ movq(arg3, Operand(rsp, kSavedRegistersAreaSize));
+ __ movq(arg_reg_3, Operand(rsp, kSavedRegistersAreaSize));
// Get the address of the location in the code object if possible
// and compute the fp-to-sp delta in register arg5.
if (type() == EAGER) {
- __ Set(arg4, 0);
+ __ Set(arg_reg_4, 0);
__ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
} else {
- __ movq(arg4, Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
+ __ movq(arg_reg_4,
+ Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
__ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 2 * kPointerSize));
}
@@ -649,8 +634,8 @@
// Allocate a new deoptimizer object.
__ PrepareCallCFunction(6);
__ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
- __ movq(arg1, rax);
- __ Set(arg2, type());
+ __ movq(arg_reg_1, rax);
+ __ Set(arg_reg_2, type());
// Args 3 and 4 are already in the right registers.
// On windows put the arguments on the stack (PrepareCallCFunction
@@ -713,8 +698,8 @@
// Compute the output frame in the deoptimizer.
__ push(rax);
__ PrepareCallCFunction(2);
- __ movq(arg1, rax);
- __ LoadAddress(arg2, ExternalReference::isolate_address(isolate()));
+ __ movq(arg_reg_1, rax);
+ __ LoadAddress(arg_reg_2, ExternalReference::isolate_address(isolate()));
{
AllowExternalCallThatCantCauseGC scope(masm());
__ CallCFunction(
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index f9651f0..a20d468 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -1961,6 +1961,103 @@
}
+void FullCodeGenerator::EmitGeneratorResume(Expression* generator,
+ Expression* value,
+ JSGeneratorObject::ResumeMode resume_mode) {
+ // The value stays in rax, and is ultimately read by the resumed generator, as
+ // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. rbx
+ // will hold the generator object until the activation has been resumed.
+ VisitForStackValue(generator);
+ VisitForAccumulatorValue(value);
+ __ pop(rbx);
+
+ // Check generator state.
+ Label wrong_state, done;
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
+ __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
+ Smi::FromInt(0));
+ __ j(less_equal, &wrong_state);
+
+ // Load suspended function and context.
+ __ movq(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
+ __ movq(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
+
+ // Push receiver.
+ __ push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
+
+ // Push holes for arguments to generator function.
+ __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+ __ movsxlq(rdx,
+ FieldOperand(rdx,
+ SharedFunctionInfo::kFormalParameterCountOffset));
+ __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
+ Label push_argument_holes, push_frame;
+ __ bind(&push_argument_holes);
+ __ subq(rdx, Immediate(1));
+ __ j(carry, &push_frame);
+ __ push(rcx);
+ __ jmp(&push_argument_holes);
+
+ // Enter a new JavaScript frame, and initialize its slots as they were when
+ // the generator was suspended.
+ Label resume_frame;
+ __ bind(&push_frame);
+ __ call(&resume_frame);
+ __ jmp(&done);
+ __ bind(&resume_frame);
+ __ push(rbp); // Caller's frame pointer.
+ __ movq(rbp, rsp);
+ __ push(rsi); // Callee's context.
+ __ push(rdi); // Callee's JS Function.
+
+ // Load the operand stack size.
+ __ movq(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
+ __ movq(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
+ __ SmiToInteger32(rdx, rdx);
+
+ // If we are sending a value and there is no operand stack, we can jump back
+ // in directly.
+ if (resume_mode == JSGeneratorObject::SEND) {
+ Label slow_resume;
+ __ cmpq(rdx, Immediate(0));
+ __ j(not_zero, &slow_resume);
+ __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
+ __ SmiToInteger64(rcx,
+ FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
+ __ addq(rdx, rcx);
+ __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
+ Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
+ __ jmp(rdx);
+ __ bind(&slow_resume);
+ }
+
+ // Otherwise, we push holes for the operand stack and call the runtime to fix
+ // up the stack and the handlers.
+ Label push_operand_holes, call_resume;
+ __ bind(&push_operand_holes);
+ __ subq(rdx, Immediate(1));
+ __ j(carry, &call_resume);
+ __ push(rcx);
+ __ jmp(&push_operand_holes);
+ __ bind(&call_resume);
+ __ push(rbx);
+ __ push(result_register());
+ __ Push(Smi::FromInt(resume_mode));
+ __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
+ // Not reached: the runtime call returns elsewhere.
+ __ Abort("Generator failed to resume.");
+
+ // Throw error if we attempt to operate on a running generator.
+ __ bind(&wrong_state);
+ __ push(rbx);
+ __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
+
+ __ bind(&done);
+ context()->Plug(result_register());
+}
+
+
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Literal* key = prop->key()->AsLiteral();
@@ -3001,16 +3098,10 @@
// Return a random uint32 number in rax.
// The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
__ PrepareCallCFunction(1);
-#ifdef _WIN64
- __ movq(rcx,
+ __ movq(arg_reg_1,
ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
- __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
-
-#else
- __ movq(rdi,
- ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
- __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
-#endif
+ __ movq(arg_reg_1,
+ FieldOperand(arg_reg_1, GlobalObject::kNativeContextOffset));
__ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
// Convert 32 random bits in rax to 0.(32 random bits) in a double
@@ -3108,13 +3199,8 @@
}
__ bind(&runtime);
__ PrepareCallCFunction(2);
-#ifdef _WIN64
- __ movq(rcx, object);
- __ movq(rdx, index, RelocInfo::NONE64);
-#else
- __ movq(rdi, object);
- __ movq(rsi, index, RelocInfo::NONE64);
-#endif
+ __ movq(arg_reg_1, object);
+ __ movq(arg_reg_2, index, RelocInfo::NONE64);
__ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
__ jmp(&done);
@@ -4435,24 +4521,20 @@
VisitForAccumulatorValue(sub_expr);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
- Heap::RootListIndex nil_value = nil == kNullValue ?
- Heap::kNullValueRootIndex :
- Heap::kUndefinedValueRootIndex;
- __ CompareRoot(rax, nil_value);
- if (expr->op() == Token::EQ_STRICT) {
+ EqualityKind kind = expr->op() == Token::EQ_STRICT
+ ? kStrictEquality : kNonStrictEquality;
+ if (kind == kStrictEquality) {
+ Heap::RootListIndex nil_value = nil == kNullValue ?
+ Heap::kNullValueRootIndex :
+ Heap::kUndefinedValueRootIndex;
+ __ CompareRoot(rax, nil_value);
Split(equal, if_true, if_false, fall_through);
} else {
- Heap::RootListIndex other_nil_value = nil == kNullValue ?
- Heap::kUndefinedValueRootIndex :
- Heap::kNullValueRootIndex;
- __ j(equal, if_true);
- __ CompareRoot(rax, other_nil_value);
- __ j(equal, if_true);
- __ JumpIfSmi(rax, if_false);
- // It can be an undetectable object.
- __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
- __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
- Immediate(1 << Map::kIsUndetectable));
+ Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(),
+ kNonStrictEquality,
+ nil);
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
+ __ testq(rax, rax);
Split(not_zero, if_true, if_false, fall_through);
}
context()->Plug(if_true, if_false);
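
The EmitGeneratorResume code above is the fast path referred to by the Runtime_ResumeJSGeneratorObject comment in runtime.cc: it rebuilds the generator's frame, pushes holes for the arguments, and, when a value is being sent and the saved operand stack is empty, jumps straight back to the stored continuation offset; THROW resumes and non-empty operand stacks fall through to the runtime call. A high-level model of that guard, with illustrative types rather than V8's:

  struct GeneratorState {
    int continuation;        // offset into the function's code; positive
                             // while suspended, non-positive once executing
                             // or closed (see the STATIC_ASSERTs above)
    int operand_stack_size;  // saved operands; zero on the fast path
  };

  enum ResumeMode { SEND, THROW };

  // Captures when the inline jump back to the continuation is legal,
  // per the checks EmitGeneratorResume performs.
  bool CanResumeInline(const GeneratorState& g, ResumeMode mode) {
    return mode == SEND && g.continuation > 0 && g.operand_stack_size == 0;
  }
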
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 5e3e426..fbb7c28 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -1644,13 +1644,8 @@
}
__ bind(&runtime);
__ PrepareCallCFunction(2);
-#ifdef _WIN64
- __ movq(rcx, object);
- __ movq(rdx, index, RelocInfo::NONE64);
-#else
- __ movq(rdi, object);
- __ movq(rsi, index, RelocInfo::NONE64);
-#endif
+ __ movq(arg_reg_1, object);
+ __ movq(arg_reg_2, index, RelocInfo::NONE64);
__ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
__ bind(&done);
@@ -2556,6 +2551,8 @@
rcx);
} else {
Register reg = ToRegister(instr->parameter_count());
+ // The argument count parameter is a smi.
+ __ SmiToInteger32(reg, reg);
Register return_addr_reg = reg.is(rcx) ? rbx : rcx;
__ pop(return_addr_reg);
__ shl(reg, Immediate(kPointerSizeLog2));
@@ -3646,12 +3643,7 @@
// Having marked this as a call, we can use any registers.
// Just make sure that the input/output registers are the expected ones.
- // Choose register conforming to calling convention (when bailing out).
-#ifdef _WIN64
Register exponent = rdx;
-#else
- Register exponent = rdi;
-#endif
ASSERT(!instr->right()->IsRegister() ||
ToRegister(instr->right()).is(exponent));
ASSERT(!instr->right()->IsDoubleRegister() ||
@@ -3912,9 +3904,18 @@
__ Set(rax, instr->arity());
__ Move(rbx, instr->hydrogen()->property_cell());
- Handle<Code> array_construct_code =
- isolate()->builtins()->ArrayConstructCode();
- CallCode(array_construct_code, RelocInfo::CONSTRUCT_CALL, instr);
+ Object* cell_value = instr->hydrogen()->property_cell()->value();
+ ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+ if (instr->arity() == 0) {
+ ArrayNoArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else if (instr->arity() == 1) {
+ ArraySingleArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else {
+ ArrayNArgumentsConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ }
}
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index bb25450..6707455 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -1556,12 +1556,7 @@
ASSERT(instr->left()->representation().IsDouble());
LOperand* left = UseFixedDouble(instr->left(), xmm2);
LOperand* right = exponent_type.IsDouble() ?
- UseFixedDouble(instr->right(), xmm1) :
-#ifdef _WIN64
- UseFixed(instr->right(), rdx);
-#else
- UseFixed(instr->right(), rdi);
-#endif
+ UseFixedDouble(instr->right(), xmm1) : UseFixed(instr->right(), rdx);
LPower* result = new(zone()) LPower(left, right);
return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
CAN_DEOPTIMIZE_EAGERLY);
@@ -1571,11 +1566,7 @@
LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
ASSERT(instr->representation().IsDouble());
ASSERT(instr->global_object()->representation().IsTagged());
-#ifdef _WIN64
- LOperand* global_object = UseFixed(instr->global_object(), rcx);
-#else
- LOperand* global_object = UseFixed(instr->global_object(), rdi);
-#endif
+ LOperand* global_object = UseFixed(instr->global_object(), arg_reg_1);
LRandom* result = new(zone()) LRandom(global_object);
return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
}
@@ -2384,7 +2375,8 @@
ASSERT(info()->IsStub());
CodeStubInterfaceDescriptor* descriptor =
info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
- Register reg = descriptor->register_params_[instr->index()];
+ int index = static_cast<int>(instr->index());
+ Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index);
return DefineFixed(result, reg);
}
}
diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h
index bc3281e..9154b04 100644
--- a/src/x64/lithium-x64.h
+++ b/src/x64/lithium-x64.h
@@ -1455,6 +1455,7 @@
LOperand* parameter_count() { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(Return, "return")
+ DECLARE_HYDROGEN_ACCESSOR(Return)
};
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index c193bb3..76491a3 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -725,8 +725,9 @@
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
- PrepareCallCFunction(0);
- CallCFunction(ExternalReference::log_enter_external_function(isolate()), 0);
+ PrepareCallCFunction(1);
+ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
+ CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
PopSafepointRegisters();
}
@@ -738,8 +739,9 @@
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
- PrepareCallCFunction(0);
- CallCFunction(ExternalReference::log_leave_external_function(isolate()), 0);
+ PrepareCallCFunction(1);
+ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
+ CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
PopSafepointRegisters();
}
@@ -817,11 +819,7 @@
bind(&delete_allocated_handles);
movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
movq(prev_limit_reg, rax);
-#ifdef _WIN64
- LoadAddress(rcx, ExternalReference::isolate_address(isolate()));
-#else
- LoadAddress(rdi, ExternalReference::isolate_address(isolate()));
-#endif
+ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
LoadAddress(rax,
ExternalReference::delete_handle_scope_extensions(isolate()));
call(rax);