Version 3.25.24 (based on bleeding_edge revision r20219)
Add support for per-isolate private symbols.
No longer OOM on invalid string length (issue 3060).
Remove Failure::OutOfMemory propagation and V8::IgnoreOutOfMemoryException (issue 3060).
Tentative Windows dll build fix: Don't V8_EXPORT ScriptCompiler::Source (issue 3228).
Performance and stability improvements on all platforms.
git-svn-id: http://v8.googlecode.com/svn/trunk@20220 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/ChangeLog b/ChangeLog
index e025456..ff54fa5 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,18 @@
+2014-03-25: Version 3.25.24
+
+ Add support for per-isolate private symbols.
+
+ No longer OOM on invalid string length (issue 3060).
+
+ Remove Failure::OutOfMemory propagation and
+ V8::IgnoreOutOfMemoryException (issue 3060).
+
+ Tentative Windows dll build fix: Don't V8_EXPORT ScriptCompiler::Source
+ (issue 3228).
+
+ Performance and stability improvements on all platforms.
+
+
2014-03-24: Version 3.25.23
Rename A64 port to ARM64 port (Chromium issue 354405).
diff --git a/build/all.gyp b/build/all.gyp
index 5fbd8c2..3860379 100644
--- a/build/all.gyp
+++ b/build/all.gyp
@@ -16,6 +16,7 @@
['component!="shared_library"', {
'dependencies': [
'../tools/lexer-shell.gyp:lexer-shell',
+ '../tools/lexer-shell.gyp:parser-shell',
],
}],
]
diff --git a/build/android.gypi b/build/android.gypi
index c119271..9570f44 100644
--- a/build/android.gypi
+++ b/build/android.gypi
@@ -184,9 +184,14 @@
'-L<(android_stlport_libs)/x86',
],
}],
+ ['target_arch=="x64"', {
+ 'ldflags': [
+ '-L<(android_stlport_libs)/x86_64',
+ ],
+ }],
['target_arch=="arm64"', {
'ldflags': [
- '-L<(android_stlport_libs)/arm64',
+ '-L<(android_stlport_libs)/arm64-v8a',
],
}],
],
@@ -252,8 +257,15 @@
}], # _toolset=="target"
# Settings for building host targets using the system toolchain.
['_toolset=="host"', {
- 'cflags': [ '-m32', '-pthread' ],
- 'ldflags': [ '-m32', '-pthread' ],
+ 'conditions': [
+ ['target_arch=="x64"', {
+ 'cflags': [ '-m64', '-pthread' ],
+ 'ldflags': [ '-m64', '-pthread' ],
+ }, {
+ 'cflags': [ '-m32', '-pthread' ],
+ 'ldflags': [ '-m32', '-pthread' ],
+ }],
+ ],
'ldflags!': [
'-Wl,-z,noexecstack',
'-Wl,--gc-sections',
diff --git a/include/v8.h b/include/v8.h
index 7a153b0..3687485 100644
--- a/include/v8.h
+++ b/include/v8.h
@@ -133,6 +133,7 @@
class FunctionTemplate;
class ObjectTemplate;
class Data;
+template<typename T> class FunctionCallbackInfo;
template<typename T> class PropertyCallbackInfo;
class StackTrace;
class StackFrame;
@@ -1120,29 +1121,32 @@
private:
// Prevent copying. Not implemented.
CachedData(const CachedData&);
+ CachedData& operator=(const CachedData&);
};
/**
* Source code which can be then compiled to a UnboundScript or
* BoundScript.
*/
- class V8_EXPORT Source {
+ class Source {
public:
// Source takes ownership of CachedData.
- Source(Local<String> source_string, const ScriptOrigin& origin,
+ V8_INLINE Source(Local<String> source_string, const ScriptOrigin& origin,
CachedData* cached_data = NULL);
- Source(Local<String> source_string, CachedData* cached_data = NULL);
- ~Source();
+ V8_INLINE Source(Local<String> source_string,
+ CachedData* cached_data = NULL);
+ V8_INLINE ~Source();
// Ownership of the CachedData or its buffers is *not* transferred to the
// caller. The CachedData object is alive as long as the Source object is
// alive.
- const CachedData* GetCachedData() const;
+ V8_INLINE const CachedData* GetCachedData() const;
private:
friend class ScriptCompiler;
// Prevent copying. Not implemented.
Source(const Source&);
+ Source& operator=(const Source&);
Local<String> source_string;
@@ -1998,9 +2002,20 @@
// Returns the print name string of the symbol, or undefined if none.
Local<Value> Name() const;
- // Create a symbol. If data is not NULL, it will be used as a print name.
+ // Create a symbol. If name is not empty, it will be used as the description.
static Local<Symbol> New(
- Isolate *isolate, const char* data = NULL, int length = -1);
+ Isolate *isolate, Local<String> name = Local<String>());
+
+ // Access global symbol registry.
+ // Note that symbols created this way are never collected, so
+ // they should only be used for statically fixed properties.
+ // Also, there is only one global name space for the names used as keys.
+ // To minimize the potential for clashes, use qualified names as keys.
+ static Local<Symbol> For(Isolate *isolate, Local<String> name);
+
+ // Retrieve a global symbol. Similar to |For|, but using a separate
+ // registry that is not accessible by (and cannot clash with) JavaScript code.
+ static Local<Symbol> ForApi(Isolate *isolate, Local<String> name);
V8_INLINE static Symbol* Cast(v8::Value* obj);
private:
@@ -2019,9 +2034,18 @@
// Returns the print name string of the private symbol, or undefined if none.
Local<Value> Name() const;
- // Create a private symbol. If data is not NULL, it will be the print name.
- static Local<Private> New(Isolate *isolate,
- Local<String> name = Local<String>());
+ // Create a private symbol. If name is not empty, it will be the description.
+ static Local<Private> New(
+ Isolate *isolate, Local<String> name = Local<String>());
+
+ // Retrieve a global private symbol. If a symbol with this name has not
+ // been retrieved in the same isolate before, it is created.
+ // Note that private symbols created this way are never collected, so
+ // they should only be used for statically fixed properties.
+ // Also, there is only one global name space for the names used as keys.
+ // To minimize the potential for clashes, use qualified names as keys,
+ // e.g., "Class#property".
+ static Local<Private> ForApi(Isolate *isolate, Local<String> name);
private:
Private();
@@ -2205,6 +2229,12 @@
PropertyAttribute attribute = None,
AccessControl settings = DEFAULT);
+ void SetAccessorProperty(Local<String> name,
+ Local<Function> getter,
+ Handle<Function> setter = Handle<Function>(),
+ PropertyAttribute attribute = None,
+ AccessControl settings = DEFAULT);
+
/**
* Functionality for private properties.
* This is an experimental feature, use at your own risk.
@@ -4610,20 +4640,6 @@
static void SetArrayBufferAllocator(ArrayBuffer::Allocator* allocator);
/**
- * Ignore out-of-memory exceptions.
- *
- * V8 running out of memory is treated as a fatal error by default.
- * This means that the fatal error handler is called and that V8 is
- * terminated.
- *
- * IgnoreOutOfMemoryException can be used to not treat an
- * out-of-memory situation as a fatal error. This way, the contexts
- * that did not cause the out of memory problem might be able to
- * continue execution.
- */
- static void IgnoreOutOfMemoryException();
-
- /**
* Check if V8 is dead and therefore unusable. This is the case after
* fatal errors such as out-of-memory situations.
*/
@@ -5228,7 +5244,7 @@
void Exit();
/** Returns true if the context has experienced an out of memory situation. */
- bool HasOutOfMemoryException();
+ bool HasOutOfMemoryException() { return false; }
/** Returns an isolate associated with a current context. */
v8::Isolate* GetIsolate();
@@ -6081,6 +6097,32 @@
}
+ScriptCompiler::Source::Source(Local<String> string, const ScriptOrigin& origin,
+ CachedData* data)
+ : source_string(string),
+ resource_name(origin.ResourceName()),
+ resource_line_offset(origin.ResourceLineOffset()),
+ resource_column_offset(origin.ResourceColumnOffset()),
+ resource_is_shared_cross_origin(origin.ResourceIsSharedCrossOrigin()),
+ cached_data(data) {}
+
+
+ScriptCompiler::Source::Source(Local<String> string,
+ CachedData* data)
+ : source_string(string), cached_data(data) {}
+
+
+ScriptCompiler::Source::~Source() {
+ delete cached_data;
+}
+
+
+const ScriptCompiler::CachedData* ScriptCompiler::Source::GetCachedData()
+ const {
+ return cached_data;
+}
+
+
Handle<Boolean> Boolean::New(Isolate* isolate, bool value) {
return value ? True(isolate) : False(isolate);
}
diff --git a/src/api.cc b/src/api.cc
index 402598f..cbcbe5b 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -95,11 +95,6 @@
(isolate)->handle_scope_implementer(); \
handle_scope_implementer->DecrementCallDepth(); \
if (has_pending_exception) { \
- if (handle_scope_implementer->CallDepthIsZero() && \
- (isolate)->is_out_of_memory()) { \
- if (!(isolate)->ignore_out_of_memory()) \
- i::V8::FatalProcessOutOfMemory(NULL); \
- } \
bool call_depth_is_zero = handle_scope_implementer->CallDepthIsZero(); \
(isolate)->OptionalRescheduleException(call_depth_is_zero); \
do_callback \
@@ -1629,32 +1624,6 @@
}
-ScriptCompiler::Source::Source(Local<String> string, const ScriptOrigin& origin,
- CachedData* data)
- : source_string(string),
- resource_name(origin.ResourceName()),
- resource_line_offset(origin.ResourceLineOffset()),
- resource_column_offset(origin.ResourceColumnOffset()),
- resource_is_shared_cross_origin(origin.ResourceIsSharedCrossOrigin()),
- cached_data(data) {}
-
-
-ScriptCompiler::Source::Source(Local<String> string,
- CachedData* data)
- : source_string(string), cached_data(data) {}
-
-
-ScriptCompiler::Source::~Source() {
- delete cached_data;
-}
-
-
-const ScriptCompiler::CachedData* ScriptCompiler::Source::GetCachedData()
- const {
- return cached_data;
-}
-
-
Local<Script> UnboundScript::BindToCurrentContext() {
i::Handle<i::HeapObject> obj =
i::Handle<i::HeapObject>::cast(Utils::OpenHandle(this));
@@ -3488,6 +3457,27 @@
}
+void Object::SetAccessorProperty(Local<String> name,
+ Local<Function> getter,
+ Handle<Function> setter,
+ PropertyAttribute attribute,
+ AccessControl settings) {
+ i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+ ON_BAILOUT(isolate, "v8::Object::SetAccessorProperty()", return);
+ ENTER_V8(isolate);
+ i::HandleScope scope(isolate);
+ i::Handle<i::Object> getter_i = v8::Utils::OpenHandle(*getter);
+ i::Handle<i::Object> setter_i = v8::Utils::OpenHandle(*setter, true);
+ if (setter_i.is_null()) setter_i = isolate->factory()->null_value();
+ i::JSObject::DefineAccessor(v8::Utils::OpenHandle(this),
+ v8::Utils::OpenHandle(*name),
+ getter_i,
+ setter_i,
+ static_cast<PropertyAttributes>(attribute),
+ settings);
+}
+
+
bool v8::Object::HasOwnProperty(Handle<String> key) {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
ON_BAILOUT(isolate, "v8::Object::HasOwnProperty()",
@@ -5240,12 +5230,6 @@
}
-bool Context::HasOutOfMemoryException() {
- i::Handle<i::Context> env = Utils::OpenHandle(this);
- return env->has_out_of_memory();
-}
-
-
v8::Isolate* Context::GetIsolate() {
i::Handle<i::Context> env = Utils::OpenHandle(this);
return reinterpret_cast<Isolate*>(env->GetIsolate());
@@ -5838,7 +5822,7 @@
bool Value::IsPromise() const {
i::Handle<i::Object> val = Utils::OpenHandle(this);
- if (!i::FLAG_harmony_promises || !val->IsJSObject()) return false;
+ if (!val->IsJSObject()) return false;
i::Handle<i::JSObject> obj = i::Handle<i::JSObject>::cast(val);
i::Isolate* isolate = obj->GetIsolate();
LOG_API(isolate, "IsPromise");
@@ -6136,24 +6120,54 @@
}
-Local<Symbol> v8::Symbol::New(Isolate* isolate, const char* data, int length) {
+Local<Symbol> v8::Symbol::New(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
EnsureInitializedForIsolate(i_isolate, "v8::Symbol::New()");
LOG_API(i_isolate, "Symbol::New()");
ENTER_V8(i_isolate);
i::Handle<i::Symbol> result = i_isolate->factory()->NewSymbol();
- if (data != NULL) {
- if (length == -1) length = i::StrLength(data);
- i::Handle<i::String> name = i_isolate->factory()->NewStringFromUtf8(
- i::Vector<const char>(data, length));
- result->set_name(*name);
- }
+ if (!name.IsEmpty()) result->set_name(*Utils::OpenHandle(*name));
return Utils::ToLocal(result);
}
-Local<Private> v8::Private::New(
- Isolate* isolate, Local<String> name) {
+Local<Symbol> v8::Symbol::For(Isolate* isolate, Local<String> name) {
+ i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+ i::Handle<i::String> i_name = Utils::OpenHandle(*name);
+ i::Handle<i::JSObject> registry = i_isolate->GetSymbolRegistry();
+ i::Handle<i::String> part = i_isolate->factory()->for_string();
+ i::Handle<i::JSObject> symbols =
+ i::Handle<i::JSObject>::cast(i::JSObject::GetProperty(registry, part));
+ i::Handle<i::Object> symbol = i::JSObject::GetProperty(symbols, i_name);
+ if (!symbol->IsSymbol()) {
+ ASSERT(symbol->IsUndefined());
+ symbol = i_isolate->factory()->NewSymbol();
+ i::Handle<i::Symbol>::cast(symbol)->set_name(*i_name);
+ i::JSObject::SetProperty(symbols, i_name, symbol, NONE, i::STRICT);
+ }
+ return Utils::ToLocal(i::Handle<i::Symbol>::cast(symbol));
+}
+
+
+Local<Symbol> v8::Symbol::ForApi(Isolate* isolate, Local<String> name) {
+ i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+ i::Handle<i::String> i_name = Utils::OpenHandle(*name);
+ i::Handle<i::JSObject> registry = i_isolate->GetSymbolRegistry();
+ i::Handle<i::String> part = i_isolate->factory()->for_api_string();
+ i::Handle<i::JSObject> symbols =
+ i::Handle<i::JSObject>::cast(i::JSObject::GetProperty(registry, part));
+ i::Handle<i::Object> symbol = i::JSObject::GetProperty(symbols, i_name);
+ if (!symbol->IsSymbol()) {
+ ASSERT(symbol->IsUndefined());
+ symbol = i_isolate->factory()->NewSymbol();
+ i::Handle<i::Symbol>::cast(symbol)->set_name(*i_name);
+ i::JSObject::SetProperty(symbols, i_name, symbol, NONE, i::STRICT);
+ }
+ return Utils::ToLocal(i::Handle<i::Symbol>::cast(symbol));
+}
+
+
+Local<Private> v8::Private::New(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
EnsureInitializedForIsolate(i_isolate, "v8::Private::New()");
LOG_API(i_isolate, "Private::New()");
@@ -6165,6 +6179,25 @@
}
+Local<Private> v8::Private::ForApi(Isolate* isolate, Local<String> name) {
+ i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+ i::Handle<i::String> i_name = Utils::OpenHandle(*name);
+ i::Handle<i::JSObject> registry = i_isolate->GetSymbolRegistry();
+ i::Handle<i::String> part = i_isolate->factory()->private_api_string();
+ i::Handle<i::JSObject> privates =
+ i::Handle<i::JSObject>::cast(i::JSObject::GetProperty(registry, part));
+ i::Handle<i::Object> symbol = i::JSObject::GetProperty(privates, i_name);
+ if (!symbol->IsSymbol()) {
+ ASSERT(symbol->IsUndefined());
+ symbol = i_isolate->factory()->NewPrivateSymbol();
+ i::Handle<i::Symbol>::cast(symbol)->set_name(*i_name);
+ i::JSObject::SetProperty(privates, i_name, symbol, NONE, i::STRICT);
+ }
+ Local<Symbol> result = Utils::ToLocal(i::Handle<i::Symbol>::cast(symbol));
+ return v8::Handle<Private>(reinterpret_cast<Private*>(*result));
+}
+
+
Local<Number> v8::Number::New(Isolate* isolate, double value) {
i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate);
ASSERT(internal_isolate->IsInitialized());
@@ -6204,11 +6237,6 @@
}
-void V8::IgnoreOutOfMemoryException() {
- EnterIsolateIfNeeded()->set_ignore_out_of_memory(true);
-}
-
-
bool V8::AddMessageListener(MessageCallback that, Handle<Value> data) {
i::Isolate* isolate = i::Isolate::Current();
EnsureInitializedForIsolate(isolate, "v8::V8::AddMessageListener()");
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index 6cea581..297cdcc 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -354,12 +354,17 @@
Operand::Operand(Register rm, ShiftOp shift_op, int shift_imm) {
ASSERT(is_uint5(shift_imm));
- ASSERT(shift_op != ROR || shift_imm != 0); // use RRX if you mean it
+
rm_ = rm;
rs_ = no_reg;
shift_op_ = shift_op;
shift_imm_ = shift_imm & 31;
- if (shift_op == RRX) {
+
+ if ((shift_op == ROR) && (shift_imm == 0)) {
+ // ROR #0 is functionally equivalent to LSL #0 and this allows us to encode
+ // RRX as ROR #0 (See below).
+ shift_op = LSL;
+ } else if (shift_op == RRX) {
// encoded as ROR with shift_imm == 0
ASSERT(shift_imm == 0);
shift_op_ = ROR;
@@ -1788,7 +1793,9 @@
(src.shift_imm_ == 8) ||
(src.shift_imm_ == 16) ||
(src.shift_imm_ == 24));
- ASSERT(src.shift_op() == ROR);
+ // Operand maps ROR #0 to LSL #0.
+ ASSERT((src.shift_op() == ROR) ||
+ ((src.shift_op() == LSL) && (src.shift_imm_ == 0)));
emit(cond | 0x6E*B20 | 0xF*B16 | dst.code()*B12 |
((src.shift_imm_ >> 1)&0xC)*B8 | 7*B4 | src.rm().code());
}
@@ -1810,7 +1817,9 @@
(src2.shift_imm_ == 8) ||
(src2.shift_imm_ == 16) ||
(src2.shift_imm_ == 24));
- ASSERT(src2.shift_op() == ROR);
+ // Operand maps ROR #0 to LSL #0.
+ ASSERT((src2.shift_op() == ROR) ||
+ ((src2.shift_op() == LSL) && (src2.shift_imm_ == 0)));
emit(cond | 0x6E*B20 | src1.code()*B16 | dst.code()*B12 |
((src2.shift_imm_ >> 1) &0xC)*B8 | 7*B4 | src2.rm().code());
}
@@ -1830,7 +1839,9 @@
(src.shift_imm_ == 8) ||
(src.shift_imm_ == 16) ||
(src.shift_imm_ == 24));
- ASSERT(src.shift_op() == ROR);
+ // Operand maps ROR #0 to LSL #0.
+ ASSERT((src.shift_op() == ROR) ||
+ ((src.shift_op() == LSL) && (src.shift_imm_ == 0)));
emit(cond | 0x6C*B20 | 0xF*B16 | dst.code()*B12 |
((src.shift_imm_ >> 1)&0xC)*B8 | 7*B4 | src.rm().code());
}
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index 49881cf..727b054 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -379,8 +379,9 @@
}
void split_code(int* vm, int* m) const {
ASSERT(is_valid());
- *m = (code_ & 0x10) >> 4;
- *vm = code_ & 0x0F;
+ int encoded_code = code_ << 1;
+ *m = (encoded_code & 0x10) >> 4;
+ *vm = encoded_code & 0x0F;
}
int code_;
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 5609cd0..b3df94e 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -1501,22 +1501,9 @@
}
-static void JumpIfOOM(MacroAssembler* masm,
- Register value,
- Register scratch,
- Label* oom_label) {
- STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3);
- STATIC_ASSERT(kFailureTag == 3);
- __ and_(scratch, value, Operand(0xf));
- __ cmp(scratch, Operand(0xf));
- __ b(eq, oom_label);
-}
-
-
void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
Label* throw_termination_exception,
- Label* throw_out_of_memory_exception,
bool do_gc,
bool always_allocate) {
// r0: result parameter for PerformGC, if any
@@ -1614,17 +1601,11 @@
__ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
__ b(eq, &retry);
- // Special handling of out of memory exceptions.
- JumpIfOOM(masm, r0, ip, throw_out_of_memory_exception);
-
// Retrieve the pending exception.
__ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
isolate)));
__ ldr(r0, MemOperand(ip));
- // See if we just retrieved an OOM exception.
- JumpIfOOM(masm, r0, ip, throw_out_of_memory_exception);
-
// Clear the pending exception.
__ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
__ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
@@ -1679,13 +1660,11 @@
Label throw_normal_exception;
Label throw_termination_exception;
- Label throw_out_of_memory_exception;
// Call into the runtime system.
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
false,
false);
@@ -1693,7 +1672,6 @@
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
true,
false);
@@ -1703,30 +1681,9 @@
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
true,
true);
- __ bind(&throw_out_of_memory_exception);
- // Set external caught exception to false.
- Isolate* isolate = masm->isolate();
- ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
- isolate);
- __ mov(r0, Operand(false, RelocInfo::NONE32));
- __ mov(r2, Operand(external_caught));
- __ str(r0, MemOperand(r2));
-
- // Set pending exception and r0 to out of memory exception.
- Label already_have_failure;
- JumpIfOOM(masm, r0, ip, &already_have_failure);
- Failure* out_of_memory = Failure::OutOfMemoryException(0x1);
- __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
- __ bind(&already_have_failure);
- __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
- __ str(r0, MemOperand(r2));
- // Fall through to the next label.
-
__ bind(&throw_termination_exception);
__ ThrowUncatchable(r0);
diff --git a/src/arm/constants-arm.h b/src/arm/constants-arm.h
index 78bb66c..14f4705 100644
--- a/src/arm/constants-arm.h
+++ b/src/arm/constants-arm.h
@@ -343,7 +343,7 @@
Neon8 = 0x0,
Neon16 = 0x1,
Neon32 = 0x2,
- Neon64 = 0x4
+ Neon64 = 0x3
};
// -----------------------------------------------------------------------------
diff --git a/src/arm/disasm-arm.cc b/src/arm/disasm-arm.cc
index 49e4126..aa8ee22 100644
--- a/src/arm/disasm-arm.cc
+++ b/src/arm/disasm-arm.cc
@@ -1061,7 +1061,7 @@
if (instr->Bits(19, 16) == 0xF) {
switch (instr->Bits(11, 10)) {
case 0:
- Format(instr, "uxtb16'cond 'rd, 'rm, ror #0");
+ Format(instr, "uxtb16'cond 'rd, 'rm");
break;
case 1:
Format(instr, "uxtb16'cond 'rd, 'rm, ror #8");
@@ -1085,7 +1085,7 @@
if (instr->Bits(19, 16) == 0xF) {
switch (instr->Bits(11, 10)) {
case 0:
- Format(instr, "uxtb'cond 'rd, 'rm, ror #0");
+ Format(instr, "uxtb'cond 'rd, 'rm");
break;
case 1:
Format(instr, "uxtb'cond 'rd, 'rm, ror #8");
@@ -1100,7 +1100,7 @@
} else {
switch (instr->Bits(11, 10)) {
case 0:
- Format(instr, "uxtab'cond 'rd, 'rn, 'rm, ror #0");
+ Format(instr, "uxtab'cond 'rd, 'rn, 'rm");
break;
case 1:
Format(instr, "uxtab'cond 'rd, 'rn, 'rm, ror #8");
@@ -1566,7 +1566,8 @@
if ((instr->Bits(18, 16) == 0) && (instr->Bits(11, 6) == 0x28) &&
(instr->Bit(4) == 1)) {
// vmovl signed
- int Vd = (instr->Bit(22) << 4) | instr->VdValue();
+ if ((instr->VdValue() & 1) != 0) Unknown(instr);
+ int Vd = (instr->Bit(22) << 3) | (instr->VdValue() >> 1);
int Vm = (instr->Bit(5) << 4) | instr->VmValue();
int imm3 = instr->Bits(21, 19);
out_buffer_pos_ += OS::SNPrintF(out_buffer_ + out_buffer_pos_,
@@ -1579,7 +1580,8 @@
if ((instr->Bits(18, 16) == 0) && (instr->Bits(11, 6) == 0x28) &&
(instr->Bit(4) == 1)) {
// vmovl unsigned
- int Vd = (instr->Bit(22) << 4) | instr->VdValue();
+ if ((instr->VdValue() & 1) != 0) Unknown(instr);
+ int Vd = (instr->Bit(22) << 3) | (instr->VdValue() >> 1);
int Vm = (instr->Bit(5) << 4) | instr->VmValue();
int imm3 = instr->Bits(21, 19);
out_buffer_pos_ += OS::SNPrintF(out_buffer_ + out_buffer_pos_,
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index eec910a..e2753a8 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -111,6 +111,25 @@
};
+static void EmitStackCheck(MacroAssembler* masm_,
+ Register stack_limit_scratch,
+ int pointers = 0,
+ Register scratch = sp) {
+ Isolate* isolate = masm_->isolate();
+ Label ok;
+ ASSERT(scratch.is(sp) == (pointers == 0));
+ if (pointers != 0) {
+ __ sub(scratch, sp, Operand(pointers * kPointerSize));
+ }
+ __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
+ __ cmp(scratch, Operand(stack_limit_scratch));
+ __ b(hs, &ok);
+ PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
+ __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
+ __ bind(&ok);
+}
+
+
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
@@ -179,20 +198,28 @@
// Generators allocate locals, if any, in context slots.
ASSERT(!info->function()->is_generator() || locals_count == 0);
if (locals_count > 0) {
- // Emit a loop to initialize stack cells for locals when optimizing for
- // size. Otherwise, unroll the loop for maximum performance.
+ if (locals_count >= 128) {
+ EmitStackCheck(masm_, r2, locals_count, r9);
+ }
__ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
- if (FLAG_optimize_for_size && locals_count > 4) {
- Label loop;
- __ mov(r2, Operand(locals_count));
- __ bind(&loop);
- __ sub(r2, r2, Operand(1), SetCC);
- __ push(r9);
- __ b(&loop, ne);
- } else {
- for (int i = 0; i < locals_count; i++) {
+ int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
+ if (locals_count >= kMaxPushes) {
+ int loop_iterations = locals_count / kMaxPushes;
+ __ mov(r2, Operand(loop_iterations));
+ Label loop_header;
+ __ bind(&loop_header);
+ // Do pushes.
+ for (int i = 0; i < kMaxPushes; i++) {
__ push(r9);
}
+ // Continue loop if not done.
+ __ sub(r2, r2, Operand(1), SetCC);
+ __ b(&loop_header, ne);
+ }
+ int remaining = locals_count % kMaxPushes;
+ // Emit the remaining pushes.
+ for (int i = 0; i < remaining; i++) {
+ __ push(r9);
}
}
}
@@ -303,13 +330,7 @@
{ Comment cmnt(masm_, "[ Stack check");
PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
- Label ok;
- __ LoadRoot(ip, Heap::kStackLimitRootIndex);
- __ cmp(sp, Operand(ip));
- __ b(hs, &ok);
- PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
- __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
- __ bind(&ok);
+ EmitStackCheck(masm_, ip);
}
{ Comment cmnt(masm_, "[ Body");
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index c2dfde3..0fe38a0 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -4033,6 +4033,9 @@
instr->hydrogen()->value()->IsHeapObject()
? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
+ ASSERT(!(representation.IsSmi() &&
+ instr->value()->IsConstantOperand() &&
+ !IsSmi(LConstantOperand::cast(instr->value()))));
if (representation.IsHeapObject()) {
Register value = ToRegister(instr->value());
if (!instr->hydrogen()->value()->type().IsHeapObject()) {
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index a7fccdd..8f7c1e8 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -3470,7 +3470,8 @@
if ((instr->Bits(18, 16) == 0) && (instr->Bits(11, 6) == 0x28) &&
(instr->Bit(4) == 1)) {
// vmovl signed
- int Vd = (instr->Bit(22) << 4) | instr->VdValue();
+ if ((instr->VdValue() & 1) != 0) UNIMPLEMENTED();
+ int Vd = (instr->Bit(22) << 3) | (instr->VdValue() >> 1);
int Vm = (instr->Bit(5) << 4) | instr->VmValue();
int imm3 = instr->Bits(21, 19);
if ((imm3 != 1) && (imm3 != 2) && (imm3 != 4)) UNIMPLEMENTED();
@@ -3493,7 +3494,8 @@
if ((instr->Bits(18, 16) == 0) && (instr->Bits(11, 6) == 0x28) &&
(instr->Bit(4) == 1)) {
// vmovl unsigned
- int Vd = (instr->Bit(22) << 4) | instr->VdValue();
+ if ((instr->VdValue() & 1) != 0) UNIMPLEMENTED();
+ int Vd = (instr->Bit(22) << 3) | (instr->VdValue() >> 1);
int Vm = (instr->Bit(5) << 4) | instr->VmValue();
int imm3 = instr->Bits(21, 19);
if ((imm3 != 1) && (imm3 != 2) && (imm3 != 4)) UNIMPLEMENTED();
diff --git a/src/arm64/assembler-arm64-inl.h b/src/arm64/assembler-arm64-inl.h
index c509e05..b56e3ed 100644
--- a/src/arm64/assembler-arm64-inl.h
+++ b/src/arm64/assembler-arm64-inl.h
@@ -589,20 +589,19 @@
// sequences:
//
// Without relocation:
- // movz ip0, #(target & 0x000000000000ffff)
- // movk ip0, #(target & 0x00000000ffff0000)
- // movk ip0, #(target & 0x0000ffff00000000)
- // movk ip0, #(target & 0xffff000000000000)
- // blr ip0
+ // movz temp, #(target & 0x000000000000ffff)
+ // movk temp, #(target & 0x00000000ffff0000)
+ // movk temp, #(target & 0x0000ffff00000000)
+ // blr temp
//
// With relocation:
- // ldr ip0, =target
- // blr ip0
+ // ldr temp, =target
+ // blr temp
//
// The return address is immediately after the blr instruction in both cases,
// so it can be found by adding the call size to the address at the start of
// the call sequence.
- STATIC_ASSERT(Assembler::kCallSizeWithoutRelocation == 5 * kInstructionSize);
+ STATIC_ASSERT(Assembler::kCallSizeWithoutRelocation == 4 * kInstructionSize);
STATIC_ASSERT(Assembler::kCallSizeWithRelocation == 2 * kInstructionSize);
Instruction* instr = reinterpret_cast<Instruction*>(pc);
@@ -610,8 +609,7 @@
// Verify the instruction sequence.
ASSERT(instr->following(1)->IsMovk());
ASSERT(instr->following(2)->IsMovk());
- ASSERT(instr->following(3)->IsMovk());
- ASSERT(instr->following(4)->IsBranchAndLinkToRegister());
+ ASSERT(instr->following(3)->IsBranchAndLinkToRegister());
return pc + Assembler::kCallSizeWithoutRelocation;
} else {
// Verify the instruction sequence.
diff --git a/src/arm64/assembler-arm64.h b/src/arm64/assembler-arm64.h
index 79f957b..41eaaad 100644
--- a/src/arm64/assembler-arm64.h
+++ b/src/arm64/assembler-arm64.h
@@ -115,7 +115,7 @@
struct Register : public CPURegister {
static Register Create(unsigned code, unsigned size) {
- return CPURegister::Create(code, size, CPURegister::kRegister);
+ return Register(CPURegister::Create(code, size, CPURegister::kRegister));
}
Register() {
@@ -124,7 +124,14 @@
reg_type = CPURegister::kNoRegister;
}
- Register(const CPURegister& r) { // NOLINT(runtime/explicit)
+ explicit Register(const CPURegister& r) {
+ reg_code = r.reg_code;
+ reg_size = r.reg_size;
+ reg_type = r.reg_type;
+ ASSERT(IsValidOrNone());
+ }
+
+ Register(const Register& r) { // NOLINT(runtime/explicit)
reg_code = r.reg_code;
reg_size = r.reg_size;
reg_type = r.reg_type;
@@ -236,7 +243,8 @@
struct FPRegister : public CPURegister {
static FPRegister Create(unsigned code, unsigned size) {
- return CPURegister::Create(code, size, CPURegister::kFPRegister);
+ return FPRegister(
+ CPURegister::Create(code, size, CPURegister::kFPRegister));
}
FPRegister() {
@@ -245,7 +253,14 @@
reg_type = CPURegister::kNoRegister;
}
- FPRegister(const CPURegister& r) { // NOLINT(runtime/explicit)
+ explicit FPRegister(const CPURegister& r) {
+ reg_code = r.reg_code;
+ reg_size = r.reg_size;
+ reg_type = r.reg_type;
+ ASSERT(IsValidOrNone());
+ }
+
+ FPRegister(const FPRegister& r) { // NOLINT(runtime/explicit)
reg_code = r.reg_code;
reg_size = r.reg_size;
reg_type = r.reg_type;
@@ -818,16 +833,15 @@
// as it will choose the correct value for a given relocation mode.
//
// Without relocation:
- // movz ip0, #(target & 0x000000000000ffff)
- // movk ip0, #(target & 0x00000000ffff0000)
- // movk ip0, #(target & 0x0000ffff00000000)
- // movk ip0, #(target & 0xffff000000000000)
- // blr ip0
+ // movz temp, #(target & 0x000000000000ffff)
+ // movk temp, #(target & 0x00000000ffff0000)
+ // movk temp, #(target & 0x0000ffff00000000)
+ // blr temp
//
// With relocation:
- // ldr ip0, =target
- // blr ip0
- static const int kCallSizeWithoutRelocation = 5 * kInstructionSize;
+ // ldr temp, =target
+ // blr temp
+ static const int kCallSizeWithoutRelocation = 4 * kInstructionSize;
static const int kCallSizeWithRelocation = 2 * kInstructionSize;
// Size of the generated code in bytes
diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc
index f06f6bc..b512e42 100644
--- a/src/arm64/code-stubs-arm64.cc
+++ b/src/arm64/code-stubs-arm64.cc
@@ -1133,6 +1133,39 @@
}
+void StoreRegistersStateStub::Generate(MacroAssembler* masm) {
+ MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
+ UseScratchRegisterScope temps(masm);
+ Register saved_lr = temps.UnsafeAcquire(to_be_pushed_lr());
+ Register return_address = temps.AcquireX();
+ __ Mov(return_address, lr);
+ // Restore lr with the value it had before the call to this stub (the value
+ // which must be pushed).
+ __ Mov(lr, saved_lr);
+ if (save_doubles_ == kSaveFPRegs) {
+ __ PushSafepointRegistersAndDoubles();
+ } else {
+ __ PushSafepointRegisters();
+ }
+ __ Ret(return_address);
+}
+
+
+void RestoreRegistersStateStub::Generate(MacroAssembler* masm) {
+ MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
+ UseScratchRegisterScope temps(masm);
+ Register return_address = temps.AcquireX();
+ // Preserve the return address (lr will be clobbered by the pop).
+ __ Mov(return_address, lr);
+ if (save_doubles_ == kSaveFPRegs) {
+ __ PopSafepointRegistersAndDoubles();
+ } else {
+ __ PopSafepointRegisters();
+ }
+ __ Ret(return_address);
+}
+
+
void MathPowStub::Generate(MacroAssembler* masm) {
// Stack on entry:
// jssp[0]: Exponent (as a tagged value).
@@ -1394,10 +1427,28 @@
ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
BinaryOpICStub::GenerateAheadOfTime(isolate);
+ StoreRegistersStateStub::GenerateAheadOfTime(isolate);
+ RestoreRegistersStateStub::GenerateAheadOfTime(isolate);
BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
}
+void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
+ StoreRegistersStateStub stub1(kDontSaveFPRegs);
+ stub1.GetCode(isolate);
+ StoreRegistersStateStub stub2(kSaveFPRegs);
+ stub2.GetCode(isolate);
+}
+
+
+void RestoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
+ RestoreRegistersStateStub stub1(kDontSaveFPRegs);
+ stub1.GetCode(isolate);
+ RestoreRegistersStateStub stub2(kSaveFPRegs);
+ stub2.GetCode(isolate);
+}
+
+
void CodeStub::GenerateFPStubs(Isolate* isolate) {
// Floating-point code doesn't get special handling in ARM64, so there's
// nothing to do here.
@@ -1405,18 +1456,6 @@
}
-static void JumpIfOOM(MacroAssembler* masm,
- Register value,
- Register scratch,
- Label* oom_label) {
- STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3);
- STATIC_ASSERT(kFailureTag == 3);
- __ And(scratch, value, 0xf);
- __ Cmp(scratch, 0xf);
- __ B(eq, oom_label);
-}
-
-
bool CEntryStub::NeedsImmovableCode() {
// CEntryStub stores the return address on the stack before calling into
// C++ code. In some cases, the VM accesses this address, but it is not used
@@ -1441,7 +1480,6 @@
void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_normal,
Label* throw_termination,
- Label* throw_out_of_memory,
bool do_gc,
bool always_allocate) {
// x0 : Result parameter for PerformGC, if do_gc is true.
@@ -1589,10 +1627,6 @@
__ Tst(result, kFailureTypeTagMask << kFailureTagSize);
__ B(eq, &retry); // RETRY_AFTER_GC
- // Special handling of out-of-memory exceptions: Pass the failure result,
- // rather than the exception descriptor.
- JumpIfOOM(masm, result, x10, throw_out_of_memory);
-
// Retrieve the pending exception.
const Register& exception = result;
const Register& exception_address = x11;
@@ -1601,9 +1635,6 @@
isolate)));
__ Ldr(exception, MemOperand(exception_address));
- // See if we just retrieved an OOM exception.
- JumpIfOOM(masm, exception, x10, throw_out_of_memory);
-
// Clear the pending exception.
__ Mov(x10, Operand(isolate->factory()->the_hole_value()));
__ Str(x10, MemOperand(exception_address));
@@ -1697,13 +1728,11 @@
Label throw_normal;
Label throw_termination;
- Label throw_out_of_memory;
// Call the runtime function.
GenerateCore(masm,
&throw_normal,
&throw_termination,
- &throw_out_of_memory,
false,
false);
@@ -1714,7 +1743,6 @@
GenerateCore(masm,
&throw_normal,
&throw_termination,
- &throw_out_of_memory,
true,
false);
@@ -1723,7 +1751,6 @@
GenerateCore(masm,
&throw_normal,
&throw_termination,
- &throw_out_of_memory,
true,
true);
@@ -1740,27 +1767,6 @@
// If we throw an exception, we can end up re-entering CEntryStub before we
// pop the exit frame, so need to ensure that x21-x23 contain GC-safe values
// here.
- __ Bind(&throw_out_of_memory);
- ASM_LOCATION("Throw out of memory");
- __ Mov(argv, 0);
- __ Mov(argc, 0);
- __ Mov(target, 0);
- // Set external caught exception to false.
- Isolate* isolate = masm->isolate();
- __ Mov(x2, Operand(ExternalReference(Isolate::kExternalCaughtExceptionAddress,
- isolate)));
- __ Str(xzr, MemOperand(x2));
-
- // Set pending exception and x0 to out of memory exception.
- Label already_have_failure;
- JumpIfOOM(masm, x0, x10, &already_have_failure);
- Failure* out_of_memory = Failure::OutOfMemoryException(0x1);
- __ Mov(x0, Operand(reinterpret_cast<uint64_t>(out_of_memory)));
- __ Bind(&already_have_failure);
- __ Mov(x2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
- __ Str(x0, MemOperand(x2));
- // Fall through to the next label.
__ Bind(&throw_termination);
ASM_LOCATION("Throw termination");
diff --git a/src/arm64/code-stubs-arm64.h b/src/arm64/code-stubs-arm64.h
index e986229..7e09ffa 100644
--- a/src/arm64/code-stubs-arm64.h
+++ b/src/arm64/code-stubs-arm64.h
@@ -77,6 +77,37 @@
};
+class StoreRegistersStateStub: public PlatformCodeStub {
+ public:
+ explicit StoreRegistersStateStub(SaveFPRegsMode with_fp)
+ : save_doubles_(with_fp) {}
+
+ static Register to_be_pushed_lr() { return ip0; }
+ static void GenerateAheadOfTime(Isolate* isolate);
+ private:
+ Major MajorKey() { return StoreRegistersState; }
+ int MinorKey() { return (save_doubles_ == kSaveFPRegs) ? 1 : 0; }
+ SaveFPRegsMode save_doubles_;
+
+ void Generate(MacroAssembler* masm);
+};
+
+
+class RestoreRegistersStateStub: public PlatformCodeStub {
+ public:
+ explicit RestoreRegistersStateStub(SaveFPRegsMode with_fp)
+ : save_doubles_(with_fp) {}
+
+ static void GenerateAheadOfTime(Isolate* isolate);
+ private:
+ Major MajorKey() { return RestoreRegistersState; }
+ int MinorKey() { return (save_doubles_ == kSaveFPRegs) ? 1 : 0; }
+ SaveFPRegsMode save_doubles_;
+
+ void Generate(MacroAssembler* masm);
+};
+
+
class RecordWriteStub: public PlatformCodeStub {
public:
// Stub to record the write of 'value' at 'address' in 'object'.
diff --git a/src/arm64/full-codegen-arm64.cc b/src/arm64/full-codegen-arm64.cc
index 8aa7e37..272e1c7 100644
--- a/src/arm64/full-codegen-arm64.cc
+++ b/src/arm64/full-codegen-arm64.cc
@@ -110,6 +110,25 @@
};
+static void EmitStackCheck(MacroAssembler* masm_,
+ int pointers = 0,
+ Register scratch = jssp) {
+ Isolate* isolate = masm_->isolate();
+ Label ok;
+ ASSERT(jssp.Is(__ StackPointer()));
+ ASSERT(scratch.Is(jssp) == (pointers == 0));
+ if (pointers != 0) {
+ __ Sub(scratch, jssp, pointers * kPointerSize);
+ }
+ __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
+ __ B(hs, &ok);
+ PredictableCodeSizeScope predictable(masm_,
+ Assembler::kCallSizeWithRelocation);
+ __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
+ __ Bind(&ok);
+}
+
+
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
@@ -182,8 +201,28 @@
ASSERT(!info->function()->is_generator() || locals_count == 0);
if (locals_count > 0) {
+ if (locals_count >= 128) {
+ EmitStackCheck(masm_, locals_count, x10);
+ }
__ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
- __ PushMultipleTimes(x10, locals_count);
+ if (FLAG_optimize_for_size) {
+ __ PushMultipleTimes(x10 , locals_count);
+ } else {
+ const int kMaxPushes = 32;
+ if (locals_count >= kMaxPushes) {
+ int loop_iterations = locals_count / kMaxPushes;
+ __ Mov(x3, loop_iterations);
+ Label loop_header;
+ __ Bind(&loop_header);
+ // Do pushes.
+ __ PushMultipleTimes(x10 , kMaxPushes);
+ __ Subs(x3, x3, 1);
+ __ B(ne, &loop_header);
+ }
+ int remaining = locals_count % kMaxPushes;
+ // Emit the remaining pushes.
+ __ PushMultipleTimes(x10 , remaining);
+ }
}
}
@@ -291,14 +330,7 @@
{ Comment cmnt(masm_, "[ Stack check");
PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
- Label ok;
- ASSERT(jssp.Is(__ StackPointer()));
- __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
- __ B(hs, &ok);
- PredictableCodeSizeScope predictable(masm_,
- Assembler::kCallSizeWithRelocation);
- __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
- __ Bind(&ok);
+ EmitStackCheck(masm_);
}
{ Comment cmnt(masm_, "[ Body");
diff --git a/src/arm64/lithium-codegen-arm64.cc b/src/arm64/lithium-codegen-arm64.cc
index ddf1677..7fdd94f 100644
--- a/src/arm64/lithium-codegen-arm64.cc
+++ b/src/arm64/lithium-codegen-arm64.cc
@@ -1267,8 +1267,7 @@
bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const {
- return op->IsConstantOperand() &&
- chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
+ return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
}
@@ -5241,6 +5240,9 @@
SmiCheck check_needed = instr->hydrogen()->value()->IsHeapObject()
? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
+ ASSERT(!(representation.IsSmi() &&
+ instr->value()->IsConstantOperand() &&
+ !IsInteger32Constant(LConstantOperand::cast(instr->value()))));
if (representation.IsHeapObject() &&
!instr->hydrogen()->value()->type().IsHeapObject()) {
DeoptimizeIfSmi(value, instr->environment());
diff --git a/src/arm64/lithium-codegen-arm64.h b/src/arm64/lithium-codegen-arm64.h
index f421305..d98e89c 100644
--- a/src/arm64/lithium-codegen-arm64.h
+++ b/src/arm64/lithium-codegen-arm64.h
@@ -377,14 +377,23 @@
ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
codegen_->expected_safepoint_kind_ = kind;
+ UseScratchRegisterScope temps(codegen_->masm_);
+ // Preserve the value of lr which must be saved on the stack (the call to
+ // the stub will clobber it).
+ Register to_be_pushed_lr =
+ temps.UnsafeAcquire(StoreRegistersStateStub::to_be_pushed_lr());
+ codegen_->masm_->Mov(to_be_pushed_lr, lr);
switch (codegen_->expected_safepoint_kind_) {
- case Safepoint::kWithRegisters:
- codegen_->masm_->PushSafepointRegisters();
+ case Safepoint::kWithRegisters: {
+ StoreRegistersStateStub stub(kDontSaveFPRegs);
+ codegen_->masm_->CallStub(&stub);
break;
- case Safepoint::kWithRegistersAndDoubles:
- codegen_->masm_->PushSafepointRegisters();
- codegen_->masm_->PushSafepointFPRegisters();
+ }
+ case Safepoint::kWithRegistersAndDoubles: {
+ StoreRegistersStateStub stub(kSaveFPRegs);
+ codegen_->masm_->CallStub(&stub);
break;
+ }
default:
UNREACHABLE();
}
@@ -394,13 +403,16 @@
Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
ASSERT((kind & Safepoint::kWithRegisters) != 0);
switch (kind) {
- case Safepoint::kWithRegisters:
- codegen_->masm_->PopSafepointRegisters();
+ case Safepoint::kWithRegisters: {
+ RestoreRegistersStateStub stub(kDontSaveFPRegs);
+ codegen_->masm_->CallStub(&stub);
break;
- case Safepoint::kWithRegistersAndDoubles:
- codegen_->masm_->PopSafepointFPRegisters();
- codegen_->masm_->PopSafepointRegisters();
+ }
+ case Safepoint::kWithRegistersAndDoubles: {
+ RestoreRegistersStateStub stub(kSaveFPRegs);
+ codegen_->masm_->CallStub(&stub);
break;
+ }
default:
UNREACHABLE();
}
diff --git a/src/arm64/lithium-gap-resolver-arm64.cc b/src/arm64/lithium-gap-resolver-arm64.cc
index de6717a..f0a2e6b 100644
--- a/src/arm64/lithium-gap-resolver-arm64.cc
+++ b/src/arm64/lithium-gap-resolver-arm64.cc
@@ -42,6 +42,12 @@
// once we have resolved a cycle.
#define kSavedValue root
+// We use the MacroAssembler floating-point scratch register to break a cycle
+// involving double values as the MacroAssembler will not need it for the
+// operations performed by the gap resolver.
+#define kSavedDoubleValue fp_scratch
+
+
LGapResolver::LGapResolver(LCodeGen* owner)
: cgen_(owner), moves_(32, owner->zone()), root_index_(0), in_cycle_(false),
saved_destination_(NULL), need_to_restore_root_(false) { }
@@ -169,10 +175,10 @@
ASSERT(moves_[index].destination()->Equals(moves_[root_index_].source()));
ASSERT(!in_cycle_);
- // We use a register which is not allocatable by crankshaft to break the cycle
- // to be sure it doesn't interfere with the moves we are resolving.
+ // We use registers which are not allocatable by crankshaft to break the cycle
+ // to be sure they don't interfere with the moves we are resolving.
ASSERT(!kSavedValue.IsAllocatable());
- need_to_restore_root_ = true;
+ ASSERT(!kSavedDoubleValue.IsAllocatable());
// We save in a register the source of that move and we remember its
// destination. Then we mark this move as resolved so the cycle is
@@ -182,19 +188,19 @@
saved_destination_ = moves_[index].destination();
if (source->IsRegister()) {
+ need_to_restore_root_ = true;
__ Mov(kSavedValue, cgen_->ToRegister(source));
} else if (source->IsStackSlot()) {
+ need_to_restore_root_ = true;
__ Ldr(kSavedValue, cgen_->ToMemOperand(source));
} else if (source->IsDoubleRegister()) {
- // TODO(all): We should use a double register to store the value to avoid
- // the penalty of the mov across register banks. We are going to reserve
- // d31 to hold 0.0 value. We could clobber this register while breaking the
- // cycle and restore it after like we do with the root register.
- // LGapResolver::RestoreValue() will need to be updated as well when we'll
- // do that.
- __ Fmov(kSavedValue, cgen_->ToDoubleRegister(source));
+ ASSERT(cgen_->masm()->FPTmpList()->IncludesAliasOf(kSavedDoubleValue));
+ cgen_->masm()->FPTmpList()->Remove(kSavedDoubleValue);
+ __ Fmov(kSavedDoubleValue, cgen_->ToDoubleRegister(source));
} else if (source->IsDoubleStackSlot()) {
- __ Ldr(kSavedValue, cgen_->ToMemOperand(source));
+ ASSERT(cgen_->masm()->FPTmpList()->IncludesAliasOf(kSavedDoubleValue));
+ cgen_->masm()->FPTmpList()->Remove(kSavedDoubleValue);
+ __ Ldr(kSavedDoubleValue, cgen_->ToMemOperand(source));
} else {
UNREACHABLE();
}
@@ -215,9 +221,11 @@
} else if (saved_destination_->IsStackSlot()) {
__ Str(kSavedValue, cgen_->ToMemOperand(saved_destination_));
} else if (saved_destination_->IsDoubleRegister()) {
- __ Fmov(cgen_->ToDoubleRegister(saved_destination_), kSavedValue);
+ __ Fmov(cgen_->ToDoubleRegister(saved_destination_), kSavedDoubleValue);
+ cgen_->masm()->FPTmpList()->Combine(kSavedDoubleValue);
} else if (saved_destination_->IsDoubleStackSlot()) {
- __ Str(kSavedValue, cgen_->ToMemOperand(saved_destination_));
+ __ Str(kSavedDoubleValue, cgen_->ToMemOperand(saved_destination_));
+ cgen_->masm()->FPTmpList()->Combine(kSavedDoubleValue);
} else {
UNREACHABLE();
}
diff --git a/src/arm64/macro-assembler-arm64.cc b/src/arm64/macro-assembler-arm64.cc
index d7d0ab7..1b85fbf 100644
--- a/src/arm64/macro-assembler-arm64.cc
+++ b/src/arm64/macro-assembler-arm64.cc
@@ -2024,11 +2024,13 @@
Register temp = temps.AcquireX();
if (rmode == RelocInfo::NONE64) {
+ // Addresses are 48 bits so we never need to load the upper 16 bits.
uint64_t imm = reinterpret_cast<uint64_t>(target);
+ // If we don't use ARM tagged addresses, the 16 higher bits must be 0.
+ ASSERT(((imm >> 48) & 0xffff) == 0);
movz(temp, (imm >> 0) & 0xffff, 0);
movk(temp, (imm >> 16) & 0xffff, 16);
movk(temp, (imm >> 32) & 0xffff, 32);
- movk(temp, (imm >> 48) & 0xffff, 48);
} else {
LoadRelocated(temp, Operand(reinterpret_cast<intptr_t>(target), rmode));
}
@@ -4161,15 +4163,17 @@
}
-void MacroAssembler::PushSafepointFPRegisters() {
+void MacroAssembler::PushSafepointRegistersAndDoubles() {
+ PushSafepointRegisters();
PushCPURegList(CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
FPRegister::kAllocatableFPRegisters));
}
-void MacroAssembler::PopSafepointFPRegisters() {
+void MacroAssembler::PopSafepointRegistersAndDoubles() {
PopCPURegList(CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
FPRegister::kAllocatableFPRegisters));
+ PopSafepointRegisters();
}
@@ -5118,6 +5122,14 @@
}
+CPURegister UseScratchRegisterScope::UnsafeAcquire(CPURegList* available,
+ const CPURegister& reg) {
+ ASSERT(available->IncludesAliasOf(reg));
+ available->Remove(reg);
+ return reg;
+}
+
+
#define __ masm->
diff --git a/src/arm64/macro-assembler-arm64.h b/src/arm64/macro-assembler-arm64.h
index a54ed30..ad706f6 100644
--- a/src/arm64/macro-assembler-arm64.h
+++ b/src/arm64/macro-assembler-arm64.h
@@ -1728,8 +1728,8 @@
void PushSafepointRegisters();
void PopSafepointRegisters();
- void PushSafepointFPRegisters();
- void PopSafepointFPRegisters();
+ void PushSafepointRegistersAndDoubles();
+ void PopSafepointRegistersAndDoubles();
// Store value in register src in the safepoint stack slot for register dst.
void StoreToSafepointRegisterSlot(Register src, Register dst) {
@@ -2215,11 +2215,17 @@
FPRegister AcquireS() { return AcquireNextAvailable(availablefp_).S(); }
FPRegister AcquireD() { return AcquireNextAvailable(availablefp_).D(); }
+ Register UnsafeAcquire(const Register& reg) {
+ return Register(UnsafeAcquire(available_, reg));
+ }
+
Register AcquireSameSizeAs(const Register& reg);
FPRegister AcquireSameSizeAs(const FPRegister& reg);
private:
static CPURegister AcquireNextAvailable(CPURegList* available);
+ static CPURegister UnsafeAcquire(CPURegList* available,
+ const CPURegister& reg);
// Available scratch registers.
CPURegList* available_; // kRegister
diff --git a/src/arm64/simulator-arm64.cc b/src/arm64/simulator-arm64.cc
index 9feba6e..cd475b4 100644
--- a/src/arm64/simulator-arm64.cc
+++ b/src/arm64/simulator-arm64.cc
@@ -2830,7 +2830,6 @@
unsigned fm = instr->Rm();
unsigned fa = instr->Ra();
- // The C99 (and C++11) fma function performs a fused multiply-accumulate.
switch (instr->Mask(FPDataProcessing3SourceMask)) {
// fd = fa +/- (fn * fm)
case FMADD_s: set_sreg(fd, FPMulAdd(sreg(fa), sreg(fn), sreg(fm))); break;
@@ -2860,7 +2859,7 @@
// NaNs should be handled elsewhere.
ASSERT(!std::isnan(op1) && !std::isnan(op2));
- if (isinf(op1) && isinf(op2) && (op1 != op2)) {
+ if (std::isinf(op1) && std::isinf(op2) && (op1 != op2)) {
// inf + -inf returns the default NaN.
return FPDefaultNaN<T>();
} else {
@@ -2875,7 +2874,7 @@
// NaNs should be handled elsewhere.
ASSERT(!std::isnan(op1) && !std::isnan(op2));
- if ((isinf(op1) && isinf(op2)) || ((op1 == 0.0) && (op2 == 0.0))) {
+ if ((std::isinf(op1) && std::isinf(op2)) || ((op1 == 0.0) && (op2 == 0.0))) {
// inf / inf and 0.0 / 0.0 return the default NaN.
return FPDefaultNaN<T>();
} else {
@@ -2936,7 +2935,7 @@
}
T result = FPProcessNaNs(a, b);
- return isnan(result) ? result : FPMin(a, b);
+ return std::isnan(result) ? result : FPMin(a, b);
}
@@ -2945,7 +2944,7 @@
// NaNs should be handled elsewhere.
ASSERT(!std::isnan(op1) && !std::isnan(op2));
- if ((isinf(op1) && (op2 == 0.0)) || (isinf(op2) && (op1 == 0.0))) {
+ if ((std::isinf(op1) && (op2 == 0.0)) || (std::isinf(op2) && (op1 == 0.0))) {
// inf * 0.0 returns the default NaN.
return FPDefaultNaN<T>();
} else {
@@ -3017,7 +3016,7 @@
// NaNs should be handled elsewhere.
ASSERT(!std::isnan(op1) && !std::isnan(op2));
- if (isinf(op1) && isinf(op2) && (op1 == op2)) {
+ if (std::isinf(op1) && std::isinf(op2) && (op1 == op2)) {
// inf - inf returns the default NaN.
return FPDefaultNaN<T>();
} else {
diff --git a/src/arm64/stub-cache-arm64.cc b/src/arm64/stub-cache-arm64.cc
index 528e7eb..1b2e959 100644
--- a/src/arm64/stub-cache-arm64.cc
+++ b/src/arm64/stub-cache-arm64.cc
@@ -394,22 +394,21 @@
} else if (representation.IsHeapObject()) {
__ JumpIfSmi(value_reg, miss_label);
} else if (representation.IsDouble()) {
+ UseScratchRegisterScope temps(masm);
+ DoubleRegister temp_double = temps.AcquireD();
+ __ SmiUntagToDouble(temp_double, value_reg, kSpeculativeUntag);
+
Label do_store, heap_number;
__ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2);
- // TODO(jbramley): Is fp_scratch the most appropriate FP scratch register?
- // It's only used in Fcmp, but it's not really safe to use it like this.
- __ JumpIfNotSmi(value_reg, &heap_number);
- __ SmiUntagToDouble(fp_scratch, value_reg);
- __ B(&do_store);
+ __ JumpIfSmi(value_reg, &do_store);
- __ Bind(&heap_number);
__ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
miss_label, DONT_DO_SMI_CHECK);
- __ Ldr(fp_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
+ __ Ldr(temp_double, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
__ Bind(&do_store);
- __ Str(fp_scratch, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
+ __ Str(temp_double, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
}
// Stub never generated for non-global objects that require access checks.
@@ -546,6 +545,11 @@
} else if (representation.IsHeapObject()) {
__ JumpIfSmi(value_reg, miss_label);
} else if (representation.IsDouble()) {
+ UseScratchRegisterScope temps(masm);
+ DoubleRegister temp_double = temps.AcquireD();
+
+ __ SmiUntagToDouble(temp_double, value_reg, kSpeculativeUntag);
+
// Load the double storage.
if (index < 0) {
int offset = (index * kPointerSize) + object->map()->instance_size();
@@ -559,19 +563,15 @@
// Store the value into the storage.
Label do_store, heap_number;
- // TODO(jbramley): Is fp_scratch the most appropriate FP scratch register?
- // It's only used in Fcmp, but it's not really safe to use it like this.
- __ JumpIfNotSmi(value_reg, &heap_number);
- __ SmiUntagToDouble(fp_scratch, value_reg);
- __ B(&do_store);
- __ Bind(&heap_number);
+ __ JumpIfSmi(value_reg, &do_store);
+
__ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
miss_label, DONT_DO_SMI_CHECK);
- __ Ldr(fp_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
+ __ Ldr(temp_double, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
__ Bind(&do_store);
- __ Str(fp_scratch, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
+ __ Str(temp_double, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
// Return the value (register x0).
ASSERT(value_reg.is(x0));
@@ -1009,12 +1009,9 @@
} else {
__ Mov(scratch3(), Operand(Handle<Object>(callback->data(), isolate())));
}
- // TODO(jbramley): Find another scratch register and combine the pushes
- // together. Can we use scratch1() here?
__ LoadRoot(scratch4(), Heap::kUndefinedValueRootIndex);
- __ Push(scratch3(), scratch4());
- __ Mov(scratch3(), ExternalReference::isolate_address(isolate()));
- __ Push(scratch4(), scratch3(), reg, name());
+ __ Mov(scratch2(), Operand(ExternalReference::isolate_address(isolate())));
+ __ Push(scratch3(), scratch4(), scratch4(), scratch2(), reg, name());
Register args_addr = scratch2();
__ Add(args_addr, __ StackPointer(), kPointerSize);
diff --git a/src/array-iterator.js b/src/array-iterator.js
index a8c5e00..3af659d 100644
--- a/src/array-iterator.js
+++ b/src/array-iterator.js
@@ -36,9 +36,9 @@
var ARRAY_ITERATOR_KIND_ENTRIES = 3;
// The spec draft also has "sparse" but it is never used.
-var iteratorObjectSymbol = NEW_PRIVATE("iterator_object");
-var arrayIteratorNextIndexSymbol = NEW_PRIVATE("iterator_next");
-var arrayIterationKindSymbol = NEW_PRIVATE("iterator_kind");
+var arrayIteratorObjectSymbol = GLOBAL_PRIVATE("ArrayIterator#object");
+var arrayIteratorNextIndexSymbol = GLOBAL_PRIVATE("ArrayIterator#next");
+var arrayIterationKindSymbol = GLOBAL_PRIVATE("ArrayIterator#kind");
function ArrayIterator() {}
@@ -46,7 +46,7 @@
function CreateArrayIterator(array, kind) {
var object = ToObject(array);
var iterator = new ArrayIterator;
- SET_PRIVATE(iterator, iteratorObjectSymbol, object);
+ SET_PRIVATE(iterator, arrayIteratorObjectSymbol, object);
SET_PRIVATE(iterator, arrayIteratorNextIndexSymbol, 0);
SET_PRIVATE(iterator, arrayIterationKindSymbol, kind);
return iterator;
@@ -60,7 +60,7 @@
// 15.4.5.2.2 ArrayIterator.prototype.next( )
function ArrayIteratorNext() {
var iterator = ToObject(this);
- var array = GET_PRIVATE(iterator, iteratorObjectSymbol);
+ var array = GET_PRIVATE(iterator, arrayIteratorObjectSymbol);
if (!array) {
throw MakeTypeError('incompatible_method_receiver',
['Array Iterator.prototype.next']);
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 7edc5d5..c466e7e 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -152,7 +152,7 @@
void Bootstrapper::TearDown() {
if (delete_these_non_arrays_on_tear_down_ != NULL) {
int len = delete_these_non_arrays_on_tear_down_->length();
- ASSERT(len < 20); // Don't use this mechanism for unbounded allocations.
+ ASSERT(len < 24); // Don't use this mechanism for unbounded allocations.
for (int i = 0; i < len; i++) {
delete delete_these_non_arrays_on_tear_down_->at(i);
delete_these_non_arrays_on_tear_down_->at(i) = NULL;
@@ -1112,6 +1112,18 @@
native_context()->set_data_view_fun(*data_view_fun);
}
+ { // -- W e a k M a p
+ InstallFunction(global, "WeakMap", JS_WEAK_MAP_TYPE, JSWeakMap::kSize,
+ isolate->initial_object_prototype(),
+ Builtins::kIllegal, true, true);
+ }
+
+ { // -- W e a k S e t
+ InstallFunction(global, "WeakSet", JS_WEAK_SET_TYPE, JSWeakSet::kSize,
+ isolate->initial_object_prototype(),
+ Builtins::kIllegal, true, true);
+ }
+
{ // --- arguments_boilerplate_
// Make sure we can recognize argument objects at runtime.
// This is done by introducing an anonymous function with
@@ -1310,9 +1322,6 @@
delegate->shared()->DontAdaptArguments();
}
- // Initialize the out of memory slot.
- native_context()->set_out_of_memory(heap->false_value());
-
// Initialize the embedder data slot.
Handle<FixedArray> embedder_data = factory->NewFixedArray(3);
native_context()->set_embedder_data(*embedder_data);
@@ -1362,19 +1371,6 @@
}
}
- if (FLAG_harmony_weak_collections) {
- { // -- W e a k M a p
- InstallFunction(global, "WeakMap", JS_WEAK_MAP_TYPE, JSWeakMap::kSize,
- isolate()->initial_object_prototype(),
- Builtins::kIllegal, true, true);
- }
- { // -- W e a k S e t
- InstallFunction(global, "WeakSet", JS_WEAK_SET_TYPE, JSWeakSet::kSize,
- isolate()->initial_object_prototype(),
- Builtins::kIllegal, true, true);
- }
- }
-
if (FLAG_harmony_generators) {
// Create generator meta-objects and install them on the builtins object.
Handle<JSObject> builtins(native_context()->builtins());
@@ -1567,6 +1563,7 @@
void Genesis::InstallNativeFunctions() {
HandleScope scope(isolate());
INSTALL_NATIVE(JSFunction, "CreateDate", create_date_fun);
+
INSTALL_NATIVE(JSFunction, "ToNumber", to_number_fun);
INSTALL_NATIVE(JSFunction, "ToString", to_string_fun);
INSTALL_NATIVE(JSFunction, "ToDetailString", to_detail_string_fun);
@@ -1574,6 +1571,7 @@
INSTALL_NATIVE(JSFunction, "ToInteger", to_integer_fun);
INSTALL_NATIVE(JSFunction, "ToUint32", to_uint32_fun);
INSTALL_NATIVE(JSFunction, "ToInt32", to_int32_fun);
+
INSTALL_NATIVE(JSFunction, "GlobalEval", global_eval_fun);
INSTALL_NATIVE(JSFunction, "Instantiate", instantiate_fun);
INSTALL_NATIVE(JSFunction, "ConfigureTemplateInstance",
@@ -1582,6 +1580,14 @@
INSTALL_NATIVE(JSObject, "functionCache", function_cache);
INSTALL_NATIVE(JSFunction, "ToCompletePropertyDescriptor",
to_complete_property_descriptor);
+
+ INSTALL_NATIVE(JSFunction, "IsPromise", is_promise);
+ INSTALL_NATIVE(JSFunction, "PromiseCreate", promise_create);
+ INSTALL_NATIVE(JSFunction, "PromiseResolve", promise_resolve);
+ INSTALL_NATIVE(JSFunction, "PromiseReject", promise_reject);
+ INSTALL_NATIVE(JSFunction, "PromiseChain", promise_chain);
+ INSTALL_NATIVE(JSFunction, "PromiseCatch", promise_catch);
+
INSTALL_NATIVE(JSFunction, "NotifyChange", observers_notify_change);
INSTALL_NATIVE(JSFunction, "EnqueueSpliceRecord", observers_enqueue_splice);
INSTALL_NATIVE(JSFunction, "BeginPerformSplice",
@@ -1596,15 +1602,6 @@
INSTALL_NATIVE(JSFunction, "EnqueueExternalMicrotask",
enqueue_external_microtask);
- if (FLAG_harmony_promises) {
- INSTALL_NATIVE(JSFunction, "IsPromise", is_promise);
- INSTALL_NATIVE(JSFunction, "PromiseCreate", promise_create);
- INSTALL_NATIVE(JSFunction, "PromiseResolve", promise_resolve);
- INSTALL_NATIVE(JSFunction, "PromiseReject", promise_reject);
- INSTALL_NATIVE(JSFunction, "PromiseChain", promise_chain);
- INSTALL_NATIVE(JSFunction, "PromiseCatch", promise_catch);
- }
-
if (FLAG_harmony_proxies) {
INSTALL_NATIVE(JSFunction, "DerivedHasTrap", derived_has_trap);
INSTALL_NATIVE(JSFunction, "DerivedGetTrap", derived_get_trap);
@@ -2054,8 +2051,6 @@
INSTALL_EXPERIMENTAL_NATIVE(i, symbols, "symbol.js")
INSTALL_EXPERIMENTAL_NATIVE(i, proxies, "proxy.js")
INSTALL_EXPERIMENTAL_NATIVE(i, collections, "collection.js")
- INSTALL_EXPERIMENTAL_NATIVE(i, weak_collections, "weak_collection.js")
- INSTALL_EXPERIMENTAL_NATIVE(i, promises, "promise.js")
INSTALL_EXPERIMENTAL_NATIVE(i, generators, "generator.js")
INSTALL_EXPERIMENTAL_NATIVE(i, iteration, "array-iterator.js")
INSTALL_EXPERIMENTAL_NATIVE(i, strings, "harmony-string.js")
diff --git a/src/builtins.cc b/src/builtins.cc
index 8f30260..b73ed6b 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -300,33 +300,35 @@
}
+// Returns empty handle if not applicable.
MUST_USE_RESULT
-static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
- Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
- if (!receiver->IsJSArray()) return NULL;
- JSArray* array = JSArray::cast(receiver);
- if (array->map()->is_observed()) return NULL;
- if (!array->map()->is_extensible()) return NULL;
- HeapObject* elms = array->elements();
+static inline Handle<FixedArrayBase> EnsureJSArrayWithWritableFastElements(
+ Isolate* isolate,
+ Handle<Object> receiver,
+ Arguments* args,
+ int first_added_arg) {
+ if (!receiver->IsJSArray()) return Handle<FixedArrayBase>::null();
+ Handle<JSArray> array = Handle<JSArray>::cast(receiver);
+ if (array->map()->is_observed()) return Handle<FixedArrayBase>::null();
+ if (!array->map()->is_extensible()) return Handle<FixedArrayBase>::null();
+ Handle<FixedArrayBase> elms(array->elements());
+ Heap* heap = isolate->heap();
Map* map = elms->map();
if (map == heap->fixed_array_map()) {
if (args == NULL || array->HasFastObjectElements()) return elms;
} else if (map == heap->fixed_cow_array_map()) {
- MaybeObject* maybe_writable_result = array->EnsureWritableFastElements();
- if (args == NULL || array->HasFastObjectElements() ||
- !maybe_writable_result->To(&elms)) {
- return maybe_writable_result;
- }
+ elms = JSObject::EnsureWritableFastElements(array);
+ if (args == NULL || array->HasFastObjectElements()) return elms;
} else if (map == heap->fixed_double_array_map()) {
if (args == NULL) return elms;
} else {
- return NULL;
+ return Handle<FixedArrayBase>::null();
}
// Need to ensure that the arguments passed in args can be contained in
// the array.
int args_length = args->length();
- if (first_added_arg >= args_length) return array->elements();
+ if (first_added_arg >= args_length) return handle(array->elements());
ElementsKind origin_kind = array->map()->elements_kind();
ASSERT(!IsFastObjectElementsKind(origin_kind));
@@ -345,28 +347,13 @@
}
}
if (target_kind != origin_kind) {
- MaybeObject* maybe_failure = array->TransitionElementsKind(target_kind);
- if (maybe_failure->IsFailure()) return maybe_failure;
- return array->elements();
+ JSObject::TransitionElementsKind(array, target_kind);
+ return handle(array->elements());
}
return elms;
}
-// TODO(ishell): Temporary wrapper until handlified.
-MUST_USE_RESULT
-static inline Handle<Object> EnsureJSArrayWithWritableFastElementsWrapper(
- Isolate* isolate,
- Handle<Object> receiver,
- Arguments* args,
- int first_added_arg) {
- CALL_HEAP_FUNCTION(isolate,
- EnsureJSArrayWithWritableFastElements(
- isolate->heap(), *receiver, args, first_added_arg),
- Object);
-}
-
-
// TODO(ishell): Handlify when all Array* builtins are handlified.
static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
JSArray* receiver) {
@@ -409,12 +396,10 @@
BUILTIN(ArrayPush) {
HandleScope scope(isolate);
Handle<Object> receiver = args.receiver();
- Handle<Object> elms_or_null =
- EnsureJSArrayWithWritableFastElementsWrapper(isolate, receiver, &args, 1);
- RETURN_IF_EMPTY_HANDLE(isolate, elms_or_null);
- if (*elms_or_null == NULL) return CallJsBuiltin(isolate, "ArrayPush", args);
+ Handle<FixedArrayBase> elms_obj =
+ EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1);
+ if (elms_obj.is_null()) return CallJsBuiltin(isolate, "ArrayPush", args);
- Handle<FixedArrayBase> elms_obj = Handle<FixedArrayBase>::cast(elms_or_null);
Handle<JSArray> array = Handle<JSArray>::cast(receiver);
ASSERT(!array->map()->is_observed());
@@ -544,12 +529,10 @@
BUILTIN(ArrayPop) {
HandleScope scope(isolate);
Handle<Object> receiver = args.receiver();
- Handle<Object> elms_or_null =
- EnsureJSArrayWithWritableFastElementsWrapper(isolate, receiver, NULL, 0);
- RETURN_IF_EMPTY_HANDLE(isolate, elms_or_null);
- if (*elms_or_null == NULL) return CallJsBuiltin(isolate, "ArrayPop", args);
+ Handle<FixedArrayBase> elms_obj =
+ EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
+ if (elms_obj.is_null()) return CallJsBuiltin(isolate, "ArrayPop", args);
- Handle<FixedArrayBase> elms_obj = Handle<FixedArrayBase>::cast(elms_or_null);
Handle<JSArray> array = Handle<JSArray>::cast(receiver);
ASSERT(!array->map()->is_observed());
@@ -579,15 +562,13 @@
HandleScope scope(isolate);
Heap* heap = isolate->heap();
Handle<Object> receiver = args.receiver();
- Handle<Object> elms_or_null =
- EnsureJSArrayWithWritableFastElementsWrapper(isolate, receiver, NULL, 0);
- RETURN_IF_EMPTY_HANDLE(isolate, elms_or_null);
- if ((*elms_or_null == NULL) ||
+ Handle<FixedArrayBase> elms_obj =
+ EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
+ if (elms_obj.is_null() ||
!IsJSArrayFastElementMovingAllowed(heap,
*Handle<JSArray>::cast(receiver))) {
return CallJsBuiltin(isolate, "ArrayShift", args);
}
- Handle<FixedArrayBase> elms_obj = Handle<FixedArrayBase>::cast(elms_or_null);
Handle<JSArray> array = Handle<JSArray>::cast(receiver);
ASSERT(!array->map()->is_observed());
@@ -628,15 +609,13 @@
HandleScope scope(isolate);
Heap* heap = isolate->heap();
Handle<Object> receiver = args.receiver();
- Handle<Object> elms_or_null =
- EnsureJSArrayWithWritableFastElementsWrapper(isolate, receiver, NULL, 0);
- RETURN_IF_EMPTY_HANDLE(isolate, elms_or_null);
- if ((*elms_or_null == NULL) ||
+ Handle<FixedArrayBase> elms_obj =
+ EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
+ if (elms_obj.is_null() ||
!IsJSArrayFastElementMovingAllowed(heap,
*Handle<JSArray>::cast(receiver))) {
return CallJsBuiltin(isolate, "ArrayUnshift", args);
}
- Handle<FixedArrayBase> elms_obj = Handle<FixedArrayBase>::cast(elms_or_null);
Handle<JSArray> array = Handle<JSArray>::cast(receiver);
ASSERT(!array->map()->is_observed());
if (!array->HasFastSmiOrObjectElements()) {
@@ -823,16 +802,13 @@
HandleScope scope(isolate);
Heap* heap = isolate->heap();
Handle<Object> receiver = args.receiver();
- Handle<Object> elms_or_null =
- EnsureJSArrayWithWritableFastElementsWrapper(isolate, receiver, &args, 3);
- RETURN_IF_EMPTY_HANDLE(isolate, elms_or_null);
-
- if ((*elms_or_null == NULL) ||
+ Handle<FixedArrayBase> elms_obj =
+ EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 3);
+ if (elms_obj.is_null() ||
!IsJSArrayFastElementMovingAllowed(heap,
*Handle<JSArray>::cast(receiver))) {
return CallJsBuiltin(isolate, "ArraySplice", args);
}
- Handle<FixedArrayBase> elms_obj = Handle<FixedArrayBase>::cast(elms_or_null);
Handle<JSArray> array = Handle<JSArray>::cast(receiver);
ASSERT(!array->map()->is_observed());
diff --git a/src/code-stubs.h b/src/code-stubs.h
index 3367c6d..5a88942 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -100,8 +100,8 @@
V(StringLength) \
V(KeyedStringLength)
-// List of code stubs only used on ARM platforms.
-#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_ARM64)
+// List of code stubs only used on ARM 32 bits platforms.
+#if V8_TARGET_ARCH_ARM
#define CODE_STUB_LIST_ARM(V) \
V(GetProperty) \
V(SetProperty) \
@@ -111,6 +111,19 @@
#define CODE_STUB_LIST_ARM(V)
#endif
+// List of code stubs only used on ARM 64 bits platforms.
+#if V8_TARGET_ARCH_ARM64
+#define CODE_STUB_LIST_ARM64(V) \
+ V(GetProperty) \
+ V(SetProperty) \
+ V(InvokeBuiltin) \
+ V(DirectCEntry) \
+ V(StoreRegistersState) \
+ V(RestoreRegistersState)
+#else
+#define CODE_STUB_LIST_ARM64(V)
+#endif
+
// List of code stubs only used on MIPS platforms.
#if V8_TARGET_ARCH_MIPS
#define CODE_STUB_LIST_MIPS(V) \
@@ -126,6 +139,7 @@
#define CODE_STUB_LIST(V) \
CODE_STUB_LIST_ALL_PLATFORMS(V) \
CODE_STUB_LIST_ARM(V) \
+ CODE_STUB_LIST_ARM64(V) \
CODE_STUB_LIST_MIPS(V)
// Stub is base classes of all stubs.
@@ -1496,7 +1510,6 @@
void GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
Label* throw_termination_exception,
- Label* throw_out_of_memory_exception,
bool do_gc,
bool always_allocate_scope);
diff --git a/src/contexts.h b/src/contexts.h
index 479a692..6ba9b3e 100644
--- a/src/contexts.h
+++ b/src/contexts.h
@@ -162,7 +162,6 @@
V(SCRIPT_FUNCTION_INDEX, JSFunction, script_function) \
V(OPAQUE_REFERENCE_FUNCTION_INDEX, JSFunction, opaque_reference_function) \
V(CONTEXT_EXTENSION_FUNCTION_INDEX, JSFunction, context_extension_function) \
- V(OUT_OF_MEMORY_INDEX, Object, out_of_memory) \
V(MAP_CACHE_INDEX, Object, map_cache) \
V(EMBEDDER_DATA_INDEX, FixedArray, embedder_data) \
V(ALLOW_CODE_GEN_FROM_STRINGS_INDEX, Object, allow_code_gen_from_strings) \
@@ -440,12 +439,6 @@
return map == map->GetHeap()->global_context_map();
}
- // Tells whether the native context is marked with out of memory.
- inline bool has_out_of_memory();
-
- // Mark the native context with out of memory.
- inline void mark_out_of_memory();
-
// A native context holds a list of all functions with optimized code.
void AddOptimizedFunction(JSFunction* function);
void RemoveOptimizedFunction(JSFunction* function);
diff --git a/src/d8-debug.h b/src/d8-debug.h
index f753177..2d4f5e1 100644
--- a/src/d8-debug.h
+++ b/src/d8-debug.h
@@ -31,6 +31,7 @@
#include "d8.h"
#include "debug.h"
+#include "platform/socket.h"
namespace v8 {
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index 9503f47..4d5e605 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -357,9 +357,41 @@
SelectedCodeUnlinker unlinker;
VisitAllOptimizedFunctionsForContext(context, &unlinker);
+ Isolate* isolate = context->GetHeap()->isolate();
+#ifdef DEBUG
+ Code* topmost_optimized_code = NULL;
+ bool safe_to_deopt_topmost_optimized_code = false;
+ // Make sure all activations of optimized code can deopt at their current PC.
+ // The topmost optimized code has special handling because it cannot be
+ // deoptimized due to weak object dependency.
+ for (StackFrameIterator it(isolate, isolate->thread_local_top());
+ !it.done(); it.Advance()) {
+ StackFrame::Type type = it.frame()->type();
+ if (type == StackFrame::OPTIMIZED) {
+ Code* code = it.frame()->LookupCode();
+ if (FLAG_trace_deopt) {
+ JSFunction* function =
+ static_cast<OptimizedFrame*>(it.frame())->function();
+ CodeTracer::Scope scope(isolate->GetCodeTracer());
+ PrintF(scope.file(), "[deoptimizer found activation of function: ");
+ function->PrintName(scope.file());
+ PrintF(scope.file(),
+ " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
+ }
+ SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
+ int deopt_index = safepoint.deoptimization_index();
+ bool safe_to_deopt = deopt_index != Safepoint::kNoDeoptimizationIndex;
+ CHECK(topmost_optimized_code == NULL || safe_to_deopt);
+ if (topmost_optimized_code == NULL) {
+ topmost_optimized_code = code;
+ safe_to_deopt_topmost_optimized_code = safe_to_deopt;
+ }
+ }
+ }
+#endif
+
// Move marked code from the optimized code list to the deoptimized
// code list, collecting them into a ZoneList.
- Isolate* isolate = context->GetHeap()->isolate();
Zone zone(isolate);
ZoneList<Code*> codes(10, &zone);
@@ -392,35 +424,17 @@
element = next;
}
-#ifdef DEBUG
- // Make sure all activations of optimized code can deopt at their current PC.
- for (StackFrameIterator it(isolate, isolate->thread_local_top());
- !it.done(); it.Advance()) {
- StackFrame::Type type = it.frame()->type();
- if (type == StackFrame::OPTIMIZED) {
- Code* code = it.frame()->LookupCode();
- if (FLAG_trace_deopt) {
- JSFunction* function =
- static_cast<OptimizedFrame*>(it.frame())->function();
- CodeTracer::Scope scope(isolate->GetCodeTracer());
- PrintF(scope.file(), "[deoptimizer patches for lazy deopt: ");
- function->PrintName(scope.file());
- PrintF(scope.file(),
- " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
- }
- SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
- int deopt_index = safepoint.deoptimization_index();
- CHECK(deopt_index != Safepoint::kNoDeoptimizationIndex);
- }
- }
-#endif
-
// TODO(titzer): we need a handle scope only because of the macro assembler,
// which is only used in EnsureCodeForDeoptimizationEntry.
HandleScope scope(isolate);
// Now patch all the codes for deoptimization.
for (int i = 0; i < codes.length(); i++) {
+#ifdef DEBUG
+ if (codes[i] == topmost_optimized_code) {
+ ASSERT(safe_to_deopt_topmost_optimized_code);
+ }
+#endif
// It is finally time to die, code object.
// Do platform-specific patching to force any activations to lazy deopt.
PatchCodeForDeoptimization(isolate, codes[i]);
diff --git a/src/elements.cc b/src/elements.cc
index daf4b21..527df6e 100644
--- a/src/elements.cc
+++ b/src/elements.cc
@@ -318,7 +318,7 @@
// that no GC is triggered, allocate HeapNumbers from old space if they
// can't be taken from new space.
if (!maybe_value->ToObject(&value)) {
- ASSERT(maybe_value->IsRetryAfterGC() || maybe_value->IsOutOfMemory());
+ ASSERT(maybe_value->IsRetryAfterGC());
Heap* heap = from->GetHeap();
MaybeObject* maybe_value_object =
heap->AllocateHeapNumber(from->get_scalar(i + from_start),
@@ -592,7 +592,9 @@
typedef ElementsTraitsParam ElementsTraits;
typedef typename ElementsTraitsParam::BackingStore BackingStore;
- virtual ElementsKind kind() const { return ElementsTraits::Kind; }
+ virtual ElementsKind kind() const V8_FINAL V8_OVERRIDE {
+ return ElementsTraits::Kind;
+ }
static void ValidateContents(JSObject* holder, int length) {
}
@@ -616,7 +618,7 @@
ElementsAccessorSubclass::ValidateContents(holder, length);
}
- virtual void Validate(JSObject* holder) {
+ virtual void Validate(JSObject* holder) V8_FINAL V8_OVERRIDE {
ElementsAccessorSubclass::ValidateImpl(holder);
}
@@ -631,7 +633,7 @@
virtual bool HasElement(Object* receiver,
JSObject* holder,
uint32_t key,
- FixedArrayBase* backing_store) {
+ FixedArrayBase* backing_store) V8_FINAL V8_OVERRIDE {
if (backing_store == NULL) {
backing_store = holder->elements();
}
@@ -644,7 +646,7 @@
Handle<Object> receiver,
Handle<JSObject> holder,
uint32_t key,
- Handle<FixedArrayBase> backing_store) {
+ Handle<FixedArrayBase> backing_store) V8_FINAL V8_OVERRIDE {
CALL_HEAP_FUNCTION(holder->GetIsolate(),
Get(*receiver, *holder, key,
backing_store.is_null()
@@ -652,10 +654,11 @@
Object);
}
- MUST_USE_RESULT virtual MaybeObject* Get(Object* receiver,
- JSObject* holder,
- uint32_t key,
- FixedArrayBase* backing_store) {
+ MUST_USE_RESULT virtual MaybeObject* Get(
+ Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArrayBase* backing_store) V8_FINAL V8_OVERRIDE {
if (backing_store == NULL) {
backing_store = holder->elements();
}
@@ -687,7 +690,7 @@
Object* receiver,
JSObject* holder,
uint32_t key,
- FixedArrayBase* backing_store) {
+ FixedArrayBase* backing_store) V8_FINAL V8_OVERRIDE {
if (backing_store == NULL) {
backing_store = holder->elements();
}
@@ -710,7 +713,7 @@
Object* receiver,
JSObject* holder,
uint32_t key,
- FixedArrayBase* backing_store) {
+ FixedArrayBase* backing_store) V8_FINAL V8_OVERRIDE {
if (backing_store == NULL) {
backing_store = holder->elements();
}
@@ -734,7 +737,7 @@
Object* receiver,
JSObject* holder,
uint32_t key,
- FixedArrayBase* backing_store) {
+ FixedArrayBase* backing_store) V8_FINAL V8_OVERRIDE {
if (backing_store == NULL) {
backing_store = holder->elements();
}
@@ -752,7 +755,7 @@
MUST_USE_RESULT virtual Handle<Object> SetLength(
Handle<JSArray> array,
- Handle<Object> length) {
+ Handle<Object> length) V8_FINAL V8_OVERRIDE {
Isolate* isolate = array->GetIsolate();
return ElementsAccessorSubclass::SetLengthImpl(
array, length, handle(array->elements(), isolate));
@@ -766,7 +769,7 @@
MUST_USE_RESULT virtual MaybeObject* SetCapacityAndLength(
JSArray* array,
int capacity,
- int length) {
+ int length) V8_FINAL V8_OVERRIDE {
return ElementsAccessorSubclass::SetFastElementsCapacityAndLength(
array,
capacity,
@@ -784,7 +787,7 @@
MUST_USE_RESULT virtual Handle<Object> Delete(
Handle<JSObject> obj,
uint32_t key,
- JSReceiver::DeleteMode mode) = 0;
+ JSReceiver::DeleteMode mode) V8_OVERRIDE = 0;
MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
uint32_t from_start,
@@ -821,20 +824,21 @@
Handle<FixedArrayBase> to,
uint32_t to_start,
int copy_size,
- Handle<FixedArrayBase> from) {
+ Handle<FixedArrayBase> from) V8_FINAL V8_OVERRIDE {
Handle<Object> result = CopyElementsHelper(
from_holder, from_start, from_kind, to, to_start, copy_size, from);
ASSERT(!result.is_null());
USE(result);
}
- MUST_USE_RESULT virtual MaybeObject* CopyElements(JSObject* from_holder,
- uint32_t from_start,
- ElementsKind from_kind,
- FixedArrayBase* to,
- uint32_t to_start,
- int copy_size,
- FixedArrayBase* from) {
+ MUST_USE_RESULT virtual MaybeObject* CopyElements(
+ JSObject* from_holder,
+ uint32_t from_start,
+ ElementsKind from_kind,
+ FixedArrayBase* to,
+ uint32_t to_start,
+ int copy_size,
+ FixedArrayBase* from) V8_FINAL V8_OVERRIDE {
int packed_size = kPackedSizeNotKnown;
if (from == NULL) {
from = from_holder->elements();
@@ -858,7 +862,7 @@
Object* receiver,
JSObject* holder,
FixedArray* to,
- FixedArrayBase* from) {
+ FixedArrayBase* from) V8_FINAL V8_OVERRIDE {
int len0 = to->length();
#ifdef ENABLE_SLOW_ASSERTS
if (FLAG_enable_slow_asserts) {
@@ -936,7 +940,8 @@
return backing_store->length();
}
- virtual uint32_t GetCapacity(FixedArrayBase* backing_store) {
+ virtual uint32_t GetCapacity(FixedArrayBase* backing_store)
+ V8_FINAL V8_OVERRIDE {
return ElementsAccessorSubclass::GetCapacityImpl(backing_store);
}
@@ -946,7 +951,7 @@
}
virtual uint32_t GetKeyForIndex(FixedArrayBase* backing_store,
- uint32_t index) {
+ uint32_t index) V8_FINAL V8_OVERRIDE {
return ElementsAccessorSubclass::GetKeyForIndexImpl(backing_store, index);
}
@@ -1102,9 +1107,10 @@
return isolate->factory()->true_value();
}
- virtual Handle<Object> Delete(Handle<JSObject> obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
+ virtual Handle<Object> Delete(
+ Handle<JSObject> obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) V8_FINAL V8_OVERRIDE {
return DeleteCommon(obj, key, mode);
}
@@ -1436,9 +1442,10 @@
return obj;
}
- MUST_USE_RESULT virtual Handle<Object> Delete(Handle<JSObject> obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
+ MUST_USE_RESULT virtual Handle<Object> Delete(
+ Handle<JSObject> obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) V8_FINAL V8_OVERRIDE {
// External arrays always ignore deletes.
return obj->GetIsolate()->factory()->true_value();
}
@@ -1621,9 +1628,10 @@
friend class ElementsAccessorBase<DictionaryElementsAccessor,
ElementsKindTraits<DICTIONARY_ELEMENTS> >;
- MUST_USE_RESULT virtual Handle<Object> Delete(Handle<JSObject> obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
+ MUST_USE_RESULT virtual Handle<Object> Delete(
+ Handle<JSObject> obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) V8_FINAL V8_OVERRIDE {
return DeleteCommon(obj, key, mode);
}
@@ -1813,9 +1821,10 @@
return obj;
}
- MUST_USE_RESULT virtual Handle<Object> Delete(Handle<JSObject> obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
+ MUST_USE_RESULT virtual Handle<Object> Delete(
+ Handle<JSObject> obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) V8_FINAL V8_OVERRIDE {
Isolate* isolate = obj->GetIsolate();
Handle<FixedArray> parameter_map =
handle(FixedArray::cast(obj->elements()), isolate);
diff --git a/src/execution.cc b/src/execution.cc
index 1e0a6a8..7442d17 100644
--- a/src/execution.cc
+++ b/src/execution.cc
@@ -135,11 +135,6 @@
ASSERT(*has_pending_exception == isolate->has_pending_exception());
if (*has_pending_exception) {
isolate->ReportPendingMessages();
- if (isolate->pending_exception()->IsOutOfMemory()) {
- if (!isolate->ignore_out_of_memory()) {
- V8::FatalProcessOutOfMemory("JS", true);
- }
- }
#ifdef ENABLE_DEBUGGER_SUPPORT
// Reset stepping state when script exits with uncaught exception.
if (isolate->debugger()->IsDebuggerActive()) {
@@ -225,9 +220,6 @@
ASSERT(catcher.HasCaught());
ASSERT(isolate->has_pending_exception());
ASSERT(isolate->external_caught_exception());
- if (isolate->is_out_of_memory() && !isolate->ignore_out_of_memory()) {
- V8::FatalProcessOutOfMemory("OOM during Execution::TryCall");
- }
if (isolate->pending_exception() ==
isolate->heap()->termination_exception()) {
result = isolate->factory()->termination_exception();
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 3b9ebb3..d7341b9 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -175,12 +175,9 @@
"enable harmony modules (implies block scoping)")
DEFINE_bool(harmony_symbols, false,
"enable harmony symbols (a.k.a. private names)")
-DEFINE_bool(harmony_promises, false, "enable harmony promises")
DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
DEFINE_bool(harmony_collections, false,
- "enable harmony collections (sets, maps, weak sets, weak maps)")
-DEFINE_bool(harmony_weak_collections, false,
- "enable only harmony weak collections (weak sets and maps)")
+ "enable harmony collections (sets, maps)")
DEFINE_bool(harmony_generators, false, "enable harmony generators")
DEFINE_bool(harmony_iteration, false, "enable harmony iteration (for-of)")
DEFINE_bool(harmony_numeric_literals, false,
@@ -200,14 +197,10 @@
DEFINE_implication(harmony, harmony_numeric_literals)
DEFINE_implication(harmony, harmony_strings)
DEFINE_implication(harmony, harmony_arrays)
-DEFINE_implication(harmony_collections, harmony_weak_collections)
-DEFINE_implication(harmony_promises, harmony_weak_collections)
DEFINE_implication(harmony_modules, harmony_scoping)
DEFINE_implication(harmony, es_staging)
DEFINE_implication(es_staging, harmony_maths)
-DEFINE_implication(es_staging, harmony_promises)
-DEFINE_implication(es_staging, harmony_weak_collections)
// Flags for experimental implementation features.
DEFINE_bool(packed_arrays, true, "optimizes arrays that have no holes")
diff --git a/src/full-codegen.cc b/src/full-codegen.cc
index 2170b84..0229c74 100644
--- a/src/full-codegen.cc
+++ b/src/full-codegen.cc
@@ -883,7 +883,7 @@
}
}
#else
- CodeGenerator::RecordPositions(masm_, pos);
+ CodeGenerator::RecordPositions(masm_, expr->position());
#endif
}
@@ -980,6 +980,31 @@
}
+void FullCodeGenerator::EmitTypedArrayInitialize(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 5);
+ for (int i = 0; i < 5; i++) VisitForStackValue(args->at(i));
+ masm()->CallRuntime(Runtime::kTypedArrayInitialize, 5);
+ context()->Plug(result_register());
+}
+
+
+void FullCodeGenerator::EmitDataViewInitialize(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 4);
+ for (int i = 0; i < 4; i++) VisitForStackValue(args->at(i));
+ masm()->CallRuntime(Runtime::kDataViewInitialize, 4);
+ context()->Plug(result_register());
+}
+
+
+void FullCodeGenerator::EmitMaxSmi(CallRuntime* expr) {
+ ASSERT(expr->arguments()->length() == 0);
+ masm()->CallRuntime(Runtime::kMaxSmi, 0);
+ context()->Plug(result_register());
+}
+
+
void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
switch (expr->op()) {
case Token::COMMA:
diff --git a/src/heap-inl.h b/src/heap-inl.h
index efad2fb..7e465d5 100644
--- a/src/heap-inl.h
+++ b/src/heap-inl.h
@@ -138,7 +138,7 @@
MaybeObject* Heap::AllocateOneByteInternalizedString(Vector<const uint8_t> str,
uint32_t hash_field) {
if (str.length() > String::kMaxLength) {
- return Failure::OutOfMemoryException(0x2);
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid string length", true);
}
// Compute map and object size.
Map* map = ascii_internalized_string_map();
@@ -171,7 +171,7 @@
MaybeObject* Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
uint32_t hash_field) {
if (str.length() > String::kMaxLength) {
- return Failure::OutOfMemoryException(0x3);
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid string length", true);
}
// Compute map and object size.
Map* map = internalized_string_map();
@@ -641,24 +641,18 @@
// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.
-#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY, OOM)\
+#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \
do { \
GC_GREEDY_CHECK(ISOLATE); \
MaybeObject* __maybe_object__ = FUNCTION_CALL; \
Object* __object__ = NULL; \
if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
- if (__maybe_object__->IsOutOfMemory()) { \
- OOM; \
- } \
if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
(ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \
allocation_space(), \
"allocation failure"); \
__maybe_object__ = FUNCTION_CALL; \
if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
- if (__maybe_object__->IsOutOfMemory()) { \
- OOM; \
- } \
if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
(ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \
(ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \
@@ -667,9 +661,6 @@
__maybe_object__ = FUNCTION_CALL; \
} \
if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
- if (__maybe_object__->IsOutOfMemory()) { \
- OOM; \
- } \
if (__maybe_object__->IsRetryAfterGC()) { \
/* TODO(1181417): Fix this. */ \
v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);\
@@ -683,8 +674,7 @@
ISOLATE, \
FUNCTION_CALL, \
RETURN_VALUE, \
- RETURN_EMPTY, \
- v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY", true))
+ RETURN_EMPTY)
#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \
CALL_AND_RETRY_OR_DIE(ISOLATE, \
@@ -701,7 +691,6 @@
CALL_AND_RETRY(ISOLATE, \
FUNCTION_CALL, \
return __object__, \
- return __maybe_object__, \
return __maybe_object__)
diff --git a/src/heap.cc b/src/heap.cc
index 0691947..e785629 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -3288,13 +3288,8 @@
}
set_undefined_cell(Cell::cast(obj));
- // Allocate objects to hold symbol registry.
- { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
- if (!maybe_obj->ToObject(&obj)) return false;
- maybe_obj = AllocateJSObjectFromMap(Map::cast(obj));
- if (!maybe_obj->ToObject(&obj)) return false;
- }
- set_symbol_registry(JSObject::cast(obj));
+ // The symbol registry is initialized lazily.
+ set_symbol_registry(undefined_value());
// Allocate object to hold object observation state.
{ MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
@@ -3871,8 +3866,7 @@
const ExternalAsciiString::Resource* resource) {
size_t length = resource->length();
if (length > static_cast<size_t>(String::kMaxLength)) {
- isolate()->context()->mark_out_of_memory();
- return Failure::OutOfMemoryException(0x5);
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid string length", true);
}
Map* map = external_ascii_string_map();
@@ -3894,8 +3888,7 @@
const ExternalTwoByteString::Resource* resource) {
size_t length = resource->length();
if (length > static_cast<size_t>(String::kMaxLength)) {
- isolate()->context()->mark_out_of_memory();
- return Failure::OutOfMemoryException(0x6);
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid string length", true);
}
// For small strings we check whether the resource contains only
@@ -3946,7 +3939,7 @@
MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
if (length < 0 || length > ByteArray::kMaxLength) {
- return Failure::OutOfMemoryException(0x7);
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
}
int size = ByteArray::SizeFor(length);
AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
@@ -4981,7 +4974,7 @@
Map* map;
if (chars > String::kMaxLength) {
- return Failure::OutOfMemoryException(0x9);
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid string length", true);
}
if (is_one_byte) {
map = ascii_internalized_string_map();
@@ -5029,7 +5022,7 @@
MaybeObject* Heap::AllocateRawOneByteString(int length,
PretenureFlag pretenure) {
if (length < 0 || length > String::kMaxLength) {
- return Failure::OutOfMemoryException(0xb);
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid string length", true);
}
int size = SeqOneByteString::SizeFor(length);
ASSERT(size <= SeqOneByteString::kMaxSize);
@@ -5053,7 +5046,7 @@
MaybeObject* Heap::AllocateRawTwoByteString(int length,
PretenureFlag pretenure) {
if (length < 0 || length > String::kMaxLength) {
- return Failure::OutOfMemoryException(0xc);
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid string length", true);
}
int size = SeqTwoByteString::SizeFor(length);
ASSERT(size <= SeqTwoByteString::kMaxSize);
@@ -5201,7 +5194,7 @@
MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
if (length < 0 || length > FixedArray::kMaxLength) {
- return Failure::OutOfMemoryException(0xe);
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
}
int size = FixedArray::SizeFor(length);
AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
@@ -5313,7 +5306,7 @@
MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
PretenureFlag pretenure) {
if (length < 0 || length > FixedDoubleArray::kMaxLength) {
- return Failure::OutOfMemoryException(0xf);
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
}
int size = FixedDoubleArray::SizeFor(length);
#ifndef V8_HOST_ARCH_64_BIT
diff --git a/src/heap.h b/src/heap.h
index 0446a98..f6cd3eb 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -190,7 +190,7 @@
V(Cell, undefined_cell, UndefineCell) \
V(JSObject, observation_state, ObservationState) \
V(Map, external_map, ExternalMap) \
- V(JSObject, symbol_registry, SymbolRegistry) \
+ V(Object, symbol_registry, SymbolRegistry) \
V(Symbol, frozen_symbol, FrozenSymbol) \
V(Symbol, nonexistent_symbol, NonExistentSymbol) \
V(Symbol, elements_transition_symbol, ElementsTransitionSymbol) \
@@ -306,6 +306,11 @@
V(String_string, "String") \
V(symbol_string, "symbol") \
V(Symbol_string, "Symbol") \
+ V(for_string, "for") \
+ V(for_api_string, "for_api") \
+ V(for_intern_string, "for_intern") \
+ V(private_api_string, "private_api") \
+ V(private_intern_string, "private_intern") \
V(Date_string, "Date") \
V(this_string, "this") \
V(to_string_string, "toString") \
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 0be9dfb..45d4b56 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -2309,7 +2309,7 @@
PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
isolate()->heap()->GetPretenureMode() : NOT_TENURED;
- return Add<HAllocate>(total_size, HType::JSArray(), pretenure_flag,
+ return Add<HAllocate>(total_size, HType::Tagged(), pretenure_flag,
instance_type);
}
@@ -2615,11 +2615,11 @@
HValue* object_elements;
if (IsFastDoubleElementsKind(kind)) {
HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
- object_elements = Add<HAllocate>(elems_size, HType::JSArray(),
+ object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
} else {
HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
- object_elements = Add<HAllocate>(elems_size, HType::JSArray(),
+ object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
NOT_TENURED, FIXED_ARRAY_TYPE);
}
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
@@ -8439,7 +8439,7 @@
}
-void HOptimizedGraphBuilder::VisitDataViewInitialize(
+void HOptimizedGraphBuilder::GenerateDataViewInitialize(
CallRuntime* expr) {
ZoneList<Expression*>* arguments = expr->arguments();
@@ -8462,7 +8462,7 @@
}
-void HOptimizedGraphBuilder::VisitTypedArrayInitialize(
+void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
CallRuntime* expr) {
ZoneList<Expression*>* arguments = expr->arguments();
@@ -8533,7 +8533,7 @@
HValue* elements =
Add<HAllocate>(
Add<HConstant>(ExternalArray::kAlignedSize),
- HType::JSArray(),
+ HType::Tagged(),
NOT_TENURED,
external_array_map->instance_type());
@@ -8581,6 +8581,13 @@
}
+void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
+ ASSERT(expr->arguments()->length() == 0);
+ HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
+ return ast_context()->ReturnInstruction(max_smi, expr->id());
+}
+
+
void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
@@ -8592,20 +8599,6 @@
const Runtime::Function* function = expr->function();
ASSERT(function != NULL);
- if (function->function_id == Runtime::kDataViewInitialize) {
- return VisitDataViewInitialize(expr);
- }
-
- if (function->function_id == Runtime::kTypedArrayInitialize) {
- return VisitTypedArrayInitialize(expr);
- }
-
- if (function->function_id == Runtime::kMaxSmi) {
- ASSERT(expr->arguments()->length() == 0);
- HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
- return ast_context()->ReturnInstruction(max_smi, expr->id());
- }
-
if (function->intrinsic_type == Runtime::INLINE) {
ASSERT(expr->name()->length() > 0);
ASSERT(expr->name()->Get(0) == '_');
@@ -9795,10 +9788,10 @@
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
empty_fixed_array);
}
- object_elements = Add<HAllocate>(object_elements_size, HType::JSObject(),
+ object_elements = Add<HAllocate>(object_elements_size, HType::Tagged(),
pretenure_flag, FIXED_DOUBLE_ARRAY_TYPE, site_context->current());
} else {
- object_elements = Add<HAllocate>(object_elements_size, HType::JSObject(),
+ object_elements = Add<HAllocate>(object_elements_size, HType::Tagged(),
pretenure_flag, FIXED_ARRAY_TYPE, site_context->current());
}
}
diff --git a/src/hydrogen.h b/src/hydrogen.h
index e91199e..d0bfa02 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -2321,13 +2321,9 @@
SmallMapList* types,
Handle<String> name);
- void VisitTypedArrayInitialize(CallRuntime* expr);
-
bool IsCallNewArrayInlineable(CallNew* expr);
void BuildInlinedCallNewArray(CallNew* expr);
- void VisitDataViewInitialize(CallRuntime* expr);
-
class PropertyAccessInfo {
public:
PropertyAccessInfo(HOptimizedGraphBuilder* builder,
diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc
index 5292755..3a4f590 100644
--- a/src/ia32/assembler-ia32.cc
+++ b/src/ia32/assembler-ia32.cc
@@ -2568,7 +2568,7 @@
void Assembler::GrowBuffer() {
- ASSERT(overflow());
+ ASSERT(buffer_overflow());
if (!own_buffer_) FATAL("external code buffer is too small");
// Compute new buffer size.
@@ -2627,7 +2627,7 @@
}
}
- ASSERT(!overflow());
+ ASSERT(!buffer_overflow());
}
diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h
index fbd8332..27e5302 100644
--- a/src/ia32/assembler-ia32.h
+++ b/src/ia32/assembler-ia32.h
@@ -1170,7 +1170,9 @@
// Check if there is less than kGap bytes available in the buffer.
// If this is the case, we need to grow the buffer before emitting
// an instruction or relocation information.
- inline bool overflow() const { return pc_ >= reloc_info_writer.pos() - kGap; }
+ inline bool buffer_overflow() const {
+ return pc_ >= reloc_info_writer.pos() - kGap;
+ }
// Get the number of bytes available in the buffer.
inline int available_space() const { return reloc_info_writer.pos() - pc_; }
@@ -1272,7 +1274,7 @@
class EnsureSpace BASE_EMBEDDED {
public:
explicit EnsureSpace(Assembler* assembler) : assembler_(assembler) {
- if (assembler_->overflow()) assembler_->GrowBuffer();
+ if (assembler_->buffer_overflow()) assembler_->GrowBuffer();
#ifdef DEBUG
space_before_ = assembler_->available_space();
#endif
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index d4c656f..fda4e63 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -500,7 +500,7 @@
ProfileEntryHookStub::MaybeCallEntryHook(masm);
// Clear the context before we push it when entering the internal frame.
- __ Set(esi, Immediate(0));
+ __ Move(esi, Immediate(0));
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -522,7 +522,7 @@
// Copy arguments to the stack in a loop.
Label loop, entry;
- __ Set(ecx, Immediate(0));
+ __ Move(ecx, Immediate(0));
__ jmp(&entry);
__ bind(&loop);
__ mov(edx, Operand(ebx, ecx, times_4, 0)); // push parameter from argv
@@ -785,7 +785,7 @@
// 3a. Patch the first argument if necessary when calling a function.
Label shift_arguments;
- __ Set(edx, Immediate(0)); // indicate regular JS_FUNCTION
+ __ Move(edx, Immediate(0)); // indicate regular JS_FUNCTION
{ Label convert_to_object, use_global_receiver, patch_receiver;
// Change context eagerly in case we need the global receiver.
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
@@ -825,7 +825,7 @@
__ push(ebx);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ mov(ebx, eax);
- __ Set(edx, Immediate(0)); // restore
+ __ Move(edx, Immediate(0)); // restore
__ pop(eax);
__ SmiUntag(eax);
@@ -848,11 +848,11 @@
// 3b. Check for function proxy.
__ bind(&slow);
- __ Set(edx, Immediate(1)); // indicate function proxy
+ __ Move(edx, Immediate(1)); // indicate function proxy
__ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
__ j(equal, &shift_arguments);
__ bind(&non_function);
- __ Set(edx, Immediate(2)); // indicate non-function
+ __ Move(edx, Immediate(2)); // indicate non-function
// 3c. Patch the first argument when calling a non-function. The
// CALL_NON_FUNCTION builtin expects the non-function callee as
@@ -880,7 +880,7 @@
{ Label function, non_proxy;
__ test(edx, edx);
__ j(zero, &function);
- __ Set(ebx, Immediate(0));
+ __ Move(ebx, Immediate(0));
__ cmp(edx, Immediate(1));
__ j(not_equal, &non_proxy);
@@ -1058,7 +1058,7 @@
__ bind(&call_proxy);
__ push(edi); // add function proxy as last argument
__ inc(eax);
- __ Set(ebx, Immediate(0));
+ __ Move(ebx, Immediate(0));
__ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
__ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
@@ -1192,7 +1192,7 @@
// Set properties and elements.
Factory* factory = masm->isolate()->factory();
- __ Set(ecx, Immediate(factory->empty_fixed_array()));
+ __ Move(ecx, Immediate(factory->empty_fixed_array()));
__ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
__ mov(FieldOperand(eax, JSObject::kElementsOffset), ecx);
@@ -1233,7 +1233,7 @@
// Load the empty string into ebx, remove the receiver from the
// stack, and jump back to the case where the argument is a string.
__ bind(&no_arguments);
- __ Set(ebx, Immediate(factory->empty_string()));
+ __ Move(ebx, Immediate(factory->empty_string()));
__ pop(ecx);
__ lea(esp, Operand(esp, kPointerSize));
__ push(ecx);
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index fa67502..02af639 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -845,8 +845,8 @@
__ bind(&try_arithmetic_simplification);
// Skip to runtime if possibly NaN (indicated by the indefinite integer).
__ cvttsd2si(exponent, Operand(double_exponent));
- __ cmp(exponent, Immediate(0x80000000u));
- __ j(equal, &call_runtime);
+ __ cmp(exponent, Immediate(0x1));
+ __ j(overflow, &call_runtime);
if (exponent_type_ == ON_STACK) {
// Detect square root case. Crankshaft detects constant +/-0.5 at
@@ -1542,7 +1542,7 @@
__ j(above, &runtime);
// Reset offset for possibly sliced string.
- __ Set(edi, Immediate(0));
+ __ Move(edi, Immediate(0));
__ mov(eax, Operand(esp, kSubjectOffset));
__ JumpIfSmi(eax, &runtime);
__ mov(edx, eax); // Make a copy of the original subject string.
@@ -1636,7 +1636,7 @@
__ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
__ j(above_equal, &runtime);
__ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
- __ Set(ecx, Immediate(1)); // Type is one byte.
+ __ Move(ecx, Immediate(1)); // Type is one byte.
// (E) Carry on. String handling is done.
__ bind(&check_code);
@@ -1904,7 +1904,7 @@
__ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
__ j(above_equal, &runtime);
__ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
- __ Set(ecx, Immediate(0)); // Type is two byte.
+ __ Move(ecx, Immediate(0)); // Type is two byte.
__ jmp(&check_code); // Go to (E).
// (10) Not a string or a short external string? If yes, bail out to runtime.
@@ -2001,7 +2001,7 @@
Label check_for_nan;
__ cmp(edx, masm->isolate()->factory()->undefined_value());
__ j(not_equal, &check_for_nan, Label::kNear);
- __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
+ __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
__ ret(0);
__ bind(&check_for_nan);
}
@@ -2016,7 +2016,7 @@
__ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
__ j(above_equal, ¬_identical);
}
- __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
+ __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
__ ret(0);
@@ -2130,7 +2130,7 @@
__ j(below, &below_label, Label::kNear);
__ j(above, &above_label, Label::kNear);
- __ Set(eax, Immediate(0));
+ __ Move(eax, Immediate(0));
__ ret(0);
__ bind(&below_label);
@@ -2222,7 +2222,7 @@
__ j(zero, &return_unequal, Label::kNear);
// The objects are both undetectable, so they both compare as the value
// undefined, and are equal.
- __ Set(eax, Immediate(EQUAL));
+ __ Move(eax, Immediate(EQUAL));
__ bind(&return_unequal);
// Return non-equal by returning the non-zero object pointer in eax,
// or return equal if we fell through to here.
@@ -2438,8 +2438,8 @@
__ pop(ecx);
__ push(edi); // put proxy as additional argument under return address
__ push(ecx);
- __ Set(eax, Immediate(argc_ + 1));
- __ Set(ebx, Immediate(0));
+ __ Move(eax, Immediate(argc_ + 1));
+ __ Move(ebx, Immediate(0));
__ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
{
Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
@@ -2450,8 +2450,8 @@
// of the original receiver from the call site).
__ bind(&non_function);
__ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
- __ Set(eax, Immediate(argc_));
- __ Set(ebx, Immediate(0));
+ __ Move(eax, Immediate(argc_));
+ __ Move(ebx, Immediate(0));
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
__ jmp(adaptor, RelocInfo::CODE_TARGET);
@@ -2533,7 +2533,7 @@
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
__ bind(&do_call);
// Set expected number of arguments to zero (not changing eax).
- __ Set(ebx, Immediate(0));
+ __ Move(ebx, Immediate(0));
Handle<Code> arguments_adaptor =
masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
@@ -2583,23 +2583,9 @@
}
-static void JumpIfOOM(MacroAssembler* masm,
- Register value,
- Register scratch,
- Label* oom_label) {
- __ mov(scratch, value);
- STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3);
- STATIC_ASSERT(kFailureTag == 3);
- __ and_(scratch, 0xf);
- __ cmp(scratch, 0xf);
- __ j(equal, oom_label);
-}
-
-
void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
Label* throw_termination_exception,
- Label* throw_out_of_memory_exception,
bool do_gc,
bool always_allocate_scope) {
// eax: result parameter for PerformGC, if any
@@ -2694,15 +2680,9 @@
__ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
__ j(zero, &retry, Label::kNear);
- // Special handling of out of memory exceptions.
- JumpIfOOM(masm, eax, ecx, throw_out_of_memory_exception);
-
// Retrieve the pending exception.
__ mov(eax, Operand::StaticVariable(pending_exception_address));
- // See if we just retrieved an OOM exception.
- JumpIfOOM(masm, eax, ecx, throw_out_of_memory_exception);
-
// Clear the pending exception.
__ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
__ mov(Operand::StaticVariable(pending_exception_address), edx);
@@ -2746,13 +2726,11 @@
Label throw_normal_exception;
Label throw_termination_exception;
- Label throw_out_of_memory_exception;
// Call into the runtime system.
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
false,
false);
@@ -2760,7 +2738,6 @@
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
true,
false);
@@ -2770,27 +2747,9 @@
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
true,
true);
- __ bind(&throw_out_of_memory_exception);
- // Set external caught exception to false.
- Isolate* isolate = masm->isolate();
- ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
- isolate);
- __ mov(Operand::StaticVariable(external_caught), Immediate(false));
-
- // Set pending exception and eax to out of memory exception.
- ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
- isolate);
- Label already_have_failure;
- JumpIfOOM(masm, eax, ecx, &already_have_failure);
- __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException(0x1)));
- __ bind(&already_have_failure);
- __ mov(Operand::StaticVariable(pending_exception), eax);
- // Fall through to the next label.
-
__ bind(&throw_termination_exception);
__ ThrowUncatchable(eax);
@@ -3024,7 +2983,7 @@
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
- __ Set(eax, Immediate(0));
+ __ Move(eax, Immediate(0));
}
}
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
@@ -3044,7 +3003,7 @@
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
- __ Set(eax, Immediate(Smi::FromInt(1)));
+ __ Move(eax, Immediate(Smi::FromInt(1)));
}
}
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
@@ -3060,20 +3019,20 @@
// Null is not instance of anything.
__ cmp(object, factory->null_value());
__ j(not_equal, &object_not_null, Label::kNear);
- __ Set(eax, Immediate(Smi::FromInt(1)));
+ __ Move(eax, Immediate(Smi::FromInt(1)));
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&object_not_null);
// Smi values is not instance of anything.
__ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
- __ Set(eax, Immediate(Smi::FromInt(1)));
+ __ Move(eax, Immediate(Smi::FromInt(1)));
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&object_not_null_or_smi);
// String values is not instance of anything.
Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
__ j(NegateCondition(is_string), &slow, Label::kNear);
- __ Set(eax, Immediate(Smi::FromInt(1)));
+ __ Move(eax, Immediate(Smi::FromInt(1)));
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
// Slow-case: Go through the JavaScript implementation.
@@ -3221,7 +3180,7 @@
__ j(not_zero, &slow_case_);
Factory* factory = masm->isolate()->factory();
- __ Set(result_, Immediate(factory->single_character_string_cache()));
+ __ Move(result_, Immediate(factory->single_character_string_cache()));
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiShiftSize == 0);
@@ -3620,7 +3579,7 @@
__ cmp(length, FieldOperand(right, String::kLengthOffset));
__ j(equal, &check_zero_length, Label::kNear);
__ bind(&strings_not_equal);
- __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
+ __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
__ ret(0);
// Check if the length is zero.
@@ -3629,7 +3588,7 @@
STATIC_ASSERT(kSmiTag == 0);
__ test(length, length);
__ j(not_zero, &compare_chars, Label::kNear);
- __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
+ __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
__ ret(0);
// Compare characters.
@@ -3638,7 +3597,7 @@
&strings_not_equal, Label::kNear);
// Characters are equal.
- __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
+ __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
__ ret(0);
}
@@ -3686,7 +3645,7 @@
// Result is EQUAL.
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(kSmiTag == 0);
- __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
+ __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
__ ret(0);
Label result_greater;
@@ -3699,12 +3658,12 @@
__ bind(&result_less);
// Result is LESS.
- __ Set(eax, Immediate(Smi::FromInt(LESS)));
+ __ Move(eax, Immediate(Smi::FromInt(LESS)));
__ ret(0);
// Result is GREATER.
__ bind(&result_greater);
- __ Set(eax, Immediate(Smi::FromInt(GREATER)));
+ __ Move(eax, Immediate(Smi::FromInt(GREATER)));
__ ret(0);
}
@@ -3755,7 +3714,7 @@
__ j(not_equal, ¬_same, Label::kNear);
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(kSmiTag == 0);
- __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
+ __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
__ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
__ ret(2 * kPointerSize);
@@ -4161,7 +4120,7 @@
__ j(not_equal, &done, Label::kNear);
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(kSmiTag == 0);
- __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
+ __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
__ bind(&done);
__ ret(0);
@@ -4206,7 +4165,7 @@
__ j(not_equal, &done, Label::kNear);
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(kSmiTag == 0);
- __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
+ __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
__ bind(&done);
__ ret(0);
@@ -4252,7 +4211,7 @@
__ j(not_equal, ¬_same, Label::kNear);
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(kSmiTag == 0);
- __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
+ __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
__ ret(0);
// Handle not identical strings.
@@ -5331,9 +5290,9 @@
// FunctionCallbackInfo::values_.
__ mov(ApiParameterOperand(3), scratch);
// FunctionCallbackInfo::length_.
- __ Set(ApiParameterOperand(4), Immediate(argc));
+ __ Move(ApiParameterOperand(4), Immediate(argc));
// FunctionCallbackInfo::is_construct_call_.
- __ Set(ApiParameterOperand(5), Immediate(0));
+ __ Move(ApiParameterOperand(5), Immediate(0));
// v8::InvocationCallback's argument.
__ lea(scratch, ApiParameterOperand(2));
diff --git a/src/ia32/debug-ia32.cc b/src/ia32/debug-ia32.cc
index 4c76f7d..42284ec 100644
--- a/src/ia32/debug-ia32.cc
+++ b/src/ia32/debug-ia32.cc
@@ -138,7 +138,7 @@
#ifdef DEBUG
__ RecordComment("// Calling from debug break to runtime - come in - over");
#endif
- __ Set(eax, Immediate(0)); // No arguments.
+ __ Move(eax, Immediate(0)); // No arguments.
__ mov(ebx, Immediate(ExternalReference::debug_break(masm->isolate())));
CEntryStub ceb(1);
@@ -154,7 +154,7 @@
int r = JSCallerSavedCode(i);
Register reg = { r };
if (FLAG_debug_code) {
- __ Set(reg, Immediate(kDebugZapValue));
+ __ Move(reg, Immediate(kDebugZapValue));
}
bool taken = reg.code() == esi.code();
if ((object_regs & (1 << r)) != 0) {
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 16c3294..b05b888 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -101,6 +101,25 @@
};
+static void EmitStackCheck(MacroAssembler* masm_,
+ int pointers = 0,
+ Register scratch = esp) {
+ Label ok;
+ Isolate* isolate = masm_->isolate();
+ ExternalReference stack_limit =
+ ExternalReference::address_of_stack_limit(isolate);
+ ASSERT(scratch.is(esp) == (pointers == 0));
+ if (pointers != 0) {
+ __ mov(scratch, esp);
+ __ sub(scratch, Immediate(pointers * kPointerSize));
+ }
+ __ cmp(scratch, Operand::StaticVariable(stack_limit));
+ __ j(above_equal, &ok, Label::kNear);
+ __ call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
+ __ bind(&ok);
+}
+
+
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
@@ -171,8 +190,26 @@
if (locals_count == 1) {
__ push(Immediate(isolate()->factory()->undefined_value()));
} else if (locals_count > 1) {
+ if (locals_count >= 128) {
+ EmitStackCheck(masm_, locals_count, ecx);
+ }
__ mov(eax, Immediate(isolate()->factory()->undefined_value()));
- for (int i = 0; i < locals_count; i++) {
+ const int kMaxPushes = 32;
+ if (locals_count >= kMaxPushes) {
+ int loop_iterations = locals_count / kMaxPushes;
+ __ mov(ecx, loop_iterations);
+ Label loop_header;
+ __ bind(&loop_header);
+ // Do pushes.
+ for (int i = 0; i < kMaxPushes; i++) {
+ __ push(eax);
+ }
+ __ dec(ecx);
+ __ j(not_zero, &loop_header, Label::kNear);
+ }
+ int remaining = locals_count % kMaxPushes;
+ // Emit the remaining pushes.
+ for (int i = 0; i < remaining; i++) {
__ push(eax);
}
}
@@ -285,13 +322,7 @@
{ Comment cmnt(masm_, "[ Stack check");
PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
- Label ok;
- ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit(isolate());
- __ cmp(esp, Operand::StaticVariable(stack_limit));
- __ j(above_equal, &ok, Label::kNear);
- __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
- __ bind(&ok);
+ EmitStackCheck(masm_);
}
{ Comment cmnt(masm_, "[ Body");
@@ -311,7 +342,7 @@
void FullCodeGenerator::ClearAccumulator() {
- __ Set(eax, Immediate(Smi::FromInt(0)));
+ __ Move(eax, Immediate(Smi::FromInt(0)));
}
@@ -470,9 +501,9 @@
void FullCodeGenerator::AccumulatorValueContext::Plug(
Handle<Object> lit) const {
if (lit->IsSmi()) {
- __ SafeSet(result_register(), Immediate(lit));
+ __ SafeMove(result_register(), Immediate(lit));
} else {
- __ Set(result_register(), Immediate(lit));
+ __ Move(result_register(), Immediate(lit));
}
}
@@ -1119,7 +1150,7 @@
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
__ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
__ j(above, &non_proxy);
- __ Set(ebx, Immediate(Smi::FromInt(0))); // Zero indicates proxy
+ __ Move(ebx, Immediate(Smi::FromInt(0))); // Zero indicates proxy
__ bind(&non_proxy);
__ push(ebx); // Smi
__ push(eax); // Array
@@ -2832,7 +2863,7 @@
SetSourcePosition(expr->position());
// Load function and argument count into edi and eax.
- __ Set(eax, Immediate(arg_count));
+ __ Move(eax, Immediate(arg_count));
__ mov(edi, Operand(esp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
@@ -3104,9 +3135,11 @@
Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
__ CheckMap(eax, map, if_false, DO_SMI_CHECK);
- __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x80000000));
- __ j(not_equal, if_false);
- __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x00000000));
+ // Check if the exponent half is 0x80000000. Comparing against 1 and
+ // checking for overflow is the shortest possible encoding.
+ __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
+ __ j(no_overflow, if_false);
+ __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
@@ -3223,7 +3256,7 @@
// parameter count in eax.
VisitForAccumulatorValue(args->at(0));
__ mov(edx, eax);
- __ Set(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
+ __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
__ CallStub(&stub);
context()->Plug(eax);
@@ -3235,7 +3268,7 @@
Label exit;
// Get the number of formal parameters.
- __ Set(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
+ __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
// Check if the calling frame is an arguments adaptor frame.
__ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
@@ -3602,13 +3635,13 @@
__ bind(&index_out_of_range);
// When the index is out of range, the spec requires us to return
// NaN.
- __ Set(result, Immediate(isolate()->factory()->nan_value()));
+ __ Move(result, Immediate(isolate()->factory()->nan_value()));
__ jmp(&done);
__ bind(&need_conversion);
// Move the undefined value into the result register, which will
// trigger conversion.
- __ Set(result, Immediate(isolate()->factory()->undefined_value()));
+ __ Move(result, Immediate(isolate()->factory()->undefined_value()));
__ jmp(&done);
NopRuntimeCallHelper call_helper;
@@ -3650,13 +3683,13 @@
__ bind(&index_out_of_range);
// When the index is out of range, the spec requires us to return
// the empty string.
- __ Set(result, Immediate(isolate()->factory()->empty_string()));
+ __ Move(result, Immediate(isolate()->factory()->empty_string()));
__ jmp(&done);
__ bind(&need_conversion);
// Move smi zero into the result register, which will trigger
// conversion.
- __ Set(result, Immediate(Smi::FromInt(0)));
+ __ Move(result, Immediate(Smi::FromInt(0)));
__ jmp(&done);
NopRuntimeCallHelper call_helper;
@@ -3907,8 +3940,8 @@
// Check that all array elements are sequential ASCII strings, and
// accumulate the sum of their lengths, as a smi-encoded value.
- __ Set(index, Immediate(0));
- __ Set(string_length, Immediate(0));
+ __ Move(index, Immediate(0));
+ __ Move(string_length, Immediate(0));
// Loop condition: while (index < length).
// Live loop registers: index, array_length, string,
// scratch, string_length, elements.
@@ -4024,7 +4057,7 @@
__ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
__ mov_b(separator_operand, scratch);
- __ Set(index, Immediate(0));
+ __ Move(index, Immediate(0));
// Jump into the loop after the code that copies the separator, so the first
// element is not preceded by a separator
__ jmp(&loop_2_entry);
@@ -4061,7 +4094,7 @@
// Long separator case (separator is more than one character).
__ bind(&long_separator);
- __ Set(index, Immediate(0));
+ __ Move(index, Immediate(0));
// Jump into the loop after the code that copies the separator, so the first
// element is not preceded by a separator
__ jmp(&loop_3_entry);
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 7c0df18..9afde8e 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -199,7 +199,7 @@
if (support_aligned_spilled_doubles_ && dynamic_frame_alignment_) {
// Move state of dynamic frame alignment into edx.
- __ Set(edx, Immediate(kNoAlignmentPadding));
+ __ Move(edx, Immediate(kNoAlignmentPadding));
Label do_not_pad, align_loop;
STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
@@ -346,7 +346,7 @@
osr_pc_offset_ = masm()->pc_offset();
// Move state of dynamic frame alignment into edx.
- __ Set(edx, Immediate(kNoAlignmentPadding));
+ __ Move(edx, Immediate(kNoAlignmentPadding));
if (support_aligned_spilled_doubles_ && dynamic_frame_alignment_) {
Label do_not_pad, align_loop;
@@ -1464,7 +1464,7 @@
DeoptimizeIf(equal, instr->environment());
} else {
__ j(not_equal, &no_overflow_possible, Label::kNear);
- __ Set(result_reg, Immediate(0));
+ __ Move(result_reg, Immediate(0));
__ jmp(&done, Label::kNear);
}
__ bind(&no_overflow_possible);
@@ -1926,12 +1926,12 @@
void LCodeGen::DoConstantI(LConstantI* instr) {
- __ Set(ToRegister(instr->result()), Immediate(instr->value()));
+ __ Move(ToRegister(instr->result()), Immediate(instr->value()));
}
void LCodeGen::DoConstantS(LConstantS* instr) {
- __ Set(ToRegister(instr->result()), Immediate(instr->value()));
+ __ Move(ToRegister(instr->result()), Immediate(instr->value()));
}
@@ -1958,22 +1958,22 @@
if (CpuFeatures::IsSupported(SSE4_1)) {
CpuFeatureScope scope2(masm(), SSE4_1);
if (lower != 0) {
- __ Set(temp, Immediate(lower));
+ __ Move(temp, Immediate(lower));
__ movd(res, Operand(temp));
- __ Set(temp, Immediate(upper));
+ __ Move(temp, Immediate(upper));
__ pinsrd(res, Operand(temp), 1);
} else {
__ xorps(res, res);
- __ Set(temp, Immediate(upper));
+ __ Move(temp, Immediate(upper));
__ pinsrd(res, Operand(temp), 1);
}
} else {
- __ Set(temp, Immediate(upper));
+ __ Move(temp, Immediate(upper));
__ movd(res, Operand(temp));
__ psllq(res, 32);
if (lower != 0) {
XMMRegister xmm_scratch = double_scratch0();
- __ Set(temp, Immediate(lower));
+ __ Move(temp, Immediate(lower));
__ movd(xmm_scratch, Operand(temp));
__ orps(res, xmm_scratch);
}
@@ -2654,8 +2654,8 @@
Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
__ CheckMap(value, map, instr->FalseLabel(chunk()), DO_SMI_CHECK);
__ cmp(FieldOperand(value, HeapNumber::kExponentOffset),
- Immediate(0x80000000));
- EmitFalseBranch(instr, not_equal);
+ Immediate(0x1));
+ EmitFalseBranch(instr, no_overflow);
__ cmp(FieldOperand(value, HeapNumber::kMantissaOffset),
Immediate(0x00000000));
EmitBranch(instr, equal);
@@ -3955,8 +3955,8 @@
__ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
__ cvttsd2si(output_reg, Operand(xmm_scratch));
// Overflow is signalled with minint.
- __ cmp(output_reg, 0x80000000u);
- DeoptimizeIf(equal, instr->environment());
+ __ cmp(output_reg, 0x1);
+ DeoptimizeIf(overflow, instr->environment());
} else {
Label negative_sign, done;
// Deoptimize on unordered.
@@ -3972,7 +3972,7 @@
__ movmskpd(output_reg, input_reg);
__ test(output_reg, Immediate(1));
DeoptimizeIf(not_zero, instr->environment());
- __ Set(output_reg, Immediate(0));
+ __ Move(output_reg, Immediate(0));
__ jmp(&done, Label::kNear);
__ bind(&positive_sign);
}
@@ -3980,8 +3980,8 @@
// Use truncating instruction (OK because input is positive).
__ cvttsd2si(output_reg, Operand(input_reg));
// Overflow is signalled with minint.
- __ cmp(output_reg, 0x80000000u);
- DeoptimizeIf(equal, instr->environment());
+ __ cmp(output_reg, 0x1);
+ DeoptimizeIf(overflow, instr->environment());
__ jmp(&done, Label::kNear);
// Non-zero negative reaches here.
@@ -4020,9 +4020,9 @@
__ addsd(xmm_scratch, input_reg);
__ cvttsd2si(output_reg, Operand(xmm_scratch));
// Overflow is signalled with minint.
- __ cmp(output_reg, 0x80000000u);
+ __ cmp(output_reg, 0x1);
__ RecordComment("D2I conversion overflow");
- DeoptimizeIf(equal, instr->environment());
+ DeoptimizeIf(overflow, instr->environment());
__ jmp(&done, dist);
__ bind(&below_one_half);
@@ -4036,9 +4036,9 @@
__ subsd(input_temp, xmm_scratch);
__ cvttsd2si(output_reg, Operand(input_temp));
// Catch minint due to overflow, and to prevent overflow when compensating.
- __ cmp(output_reg, 0x80000000u);
+ __ cmp(output_reg, 0x1);
__ RecordComment("D2I conversion overflow");
- DeoptimizeIf(equal, instr->environment());
+ DeoptimizeIf(overflow, instr->environment());
__ Cvtsi2sd(xmm_scratch, output_reg);
__ ucomisd(xmm_scratch, input_temp);
@@ -4057,7 +4057,7 @@
__ RecordComment("Minus zero");
DeoptimizeIf(not_zero, instr->environment());
}
- __ Set(output_reg, Immediate(0));
+ __ Move(output_reg, Immediate(0));
__ bind(&done);
}
@@ -4178,7 +4178,7 @@
__ bsr(result, input);
__ j(not_zero, ¬_zero_input);
- __ Set(result, Immediate(63)); // 63^31 == 32
+ __ Move(result, Immediate(63)); // 63^31 == 32
__ bind(¬_zero_input);
__ xor_(result, Immediate(31)); // for x in [0..31], 31^x == 31-x.
@@ -4238,7 +4238,7 @@
// No cell in ebx for construct type feedback in optimized code
__ mov(ebx, isolate()->factory()->undefined_value());
CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
- __ Set(eax, Immediate(instr->arity()));
+ __ Move(eax, Immediate(instr->arity()));
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
@@ -4248,7 +4248,7 @@
ASSERT(ToRegister(instr->constructor()).is(edi));
ASSERT(ToRegister(instr->result()).is(eax));
- __ Set(eax, Immediate(instr->arity()));
+ __ Move(eax, Immediate(instr->arity()));
__ mov(ebx, isolate()->factory()->undefined_value());
ElementsKind kind = instr->hydrogen()->elements_kind();
AllocationSiteOverrideMode override_mode =
@@ -4341,17 +4341,13 @@
instr->hydrogen()->value()->IsHeapObject()
? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
- if (representation.IsSmi()) {
+ ASSERT(!(representation.IsSmi() &&
+ instr->value()->IsConstantOperand() &&
+ !IsSmi(LConstantOperand::cast(instr->value()))));
+ if (representation.IsHeapObject()) {
if (instr->value()->IsConstantOperand()) {
LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
- if (!IsSmi(operand_value)) {
- DeoptimizeIf(no_condition, instr->environment());
- }
- }
- } else if (representation.IsHeapObject()) {
- if (instr->value()->IsConstantOperand()) {
- LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
- if (IsInteger32(operand_value)) {
+ if (chunk_->LookupConstant(operand_value)->HasSmiValue()) {
DeoptimizeIf(no_condition, instr->environment());
}
} else {
@@ -4790,7 +4786,7 @@
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
// contained in the register pointer map.
- __ Set(result, Immediate(0));
+ __ Move(result, Immediate(0));
PushSafepointRegistersScope scope(this);
__ push(string);
@@ -4839,7 +4835,7 @@
__ cmp(char_code, String::kMaxOneByteCharCode);
__ j(above, deferred->entry());
- __ Set(result, Immediate(factory()->single_character_string_cache()));
+ __ Move(result, Immediate(factory()->single_character_string_cache()));
__ mov(result, FieldOperand(result,
char_code, times_pointer_size,
FixedArray::kHeaderSize));
@@ -4856,7 +4852,7 @@
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
// contained in the register pointer map.
- __ Set(result, Immediate(0));
+ __ Move(result, Immediate(0));
PushSafepointRegistersScope scope(this);
__ SmiTag(char_code);
@@ -5021,7 +5017,7 @@
// TODO(3095996): Put a valid pointer value in the stack slot where the
// result register is stored, as this register is in the pointer map, but
// contains an integer value.
- __ Set(reg, Immediate(0));
+ __ Move(reg, Immediate(0));
// Preserve the value of all registers.
PushSafepointRegistersScope scope(this);
@@ -5098,7 +5094,7 @@
// result register contain a valid pointer because it is already
// contained in the register pointer map.
Register reg = ToRegister(instr->result());
- __ Set(reg, Immediate(0));
+ __ Move(reg, Immediate(0));
PushSafepointRegistersScope scope(this);
// NumberTagI and NumberTagD use the context from the frame, rather than
@@ -5298,20 +5294,20 @@
// for truncating conversions.
__ cmp(input_reg, factory()->undefined_value());
__ j(not_equal, &check_bools, Label::kNear);
- __ Set(input_reg, Immediate(0));
+ __ Move(input_reg, Immediate(0));
__ jmp(done);
__ bind(&check_bools);
__ cmp(input_reg, factory()->true_value());
__ j(not_equal, &check_false, Label::kNear);
- __ Set(input_reg, Immediate(1));
+ __ Move(input_reg, Immediate(1));
__ jmp(done);
__ bind(&check_false);
__ cmp(input_reg, factory()->false_value());
__ RecordComment("Deferred TaggedToI: cannot truncate");
DeoptimizeIf(not_equal, instr->environment());
- __ Set(input_reg, Immediate(0));
+ __ Move(input_reg, Immediate(0));
} else {
Label bailout;
XMMRegister scratch = (instr->temp() != NULL)
@@ -5902,7 +5898,7 @@
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
// contained in the register pointer map.
- __ Set(result, Immediate(Smi::FromInt(0)));
+ __ Move(result, Immediate(Smi::FromInt(0)));
PushSafepointRegistersScope scope(this);
if (instr->size()->IsRegister()) {
diff --git a/src/ia32/lithium-gap-resolver-ia32.cc b/src/ia32/lithium-gap-resolver-ia32.cc
index d621bd2..01821d9 100644
--- a/src/ia32/lithium-gap-resolver-ia32.cc
+++ b/src/ia32/lithium-gap-resolver-ia32.cc
@@ -309,7 +309,7 @@
Representation r = cgen_->IsSmi(constant_source)
? Representation::Smi() : Representation::Integer32();
if (cgen_->IsInteger32(constant_source)) {
- __ Set(dst, cgen_->ToImmediate(constant_source, r));
+ __ Move(dst, cgen_->ToImmediate(constant_source, r));
} else {
__ LoadObject(dst, cgen_->ToHandle(constant_source));
}
@@ -342,7 +342,7 @@
Representation r = cgen_->IsSmi(constant_source)
? Representation::Smi() : Representation::Integer32();
if (cgen_->IsInteger32(constant_source)) {
- __ Set(dst, cgen_->ToImmediate(constant_source, r));
+ __ Move(dst, cgen_->ToImmediate(constant_source, r));
} else {
Register tmp = EnsureTempRegister();
__ LoadObject(tmp, cgen_->ToHandle(constant_source));
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 530c3f5..0235a13 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -214,22 +214,22 @@
Register result_reg) {
Label done;
Label conv_failure;
- pxor(scratch_reg, scratch_reg);
+ xorps(scratch_reg, scratch_reg);
cvtsd2si(result_reg, input_reg);
test(result_reg, Immediate(0xFFFFFF00));
j(zero, &done, Label::kNear);
- cmp(result_reg, Immediate(0x80000000));
- j(equal, &conv_failure, Label::kNear);
+ cmp(result_reg, Immediate(0x1));
+ j(overflow, &conv_failure, Label::kNear);
mov(result_reg, Immediate(0));
- setcc(above, result_reg);
+ setcc(sign, result_reg);
sub(result_reg, Immediate(1));
and_(result_reg, Immediate(255));
jmp(&done, Label::kNear);
bind(&conv_failure);
- Set(result_reg, Immediate(0));
+ Move(result_reg, Immediate(0));
ucomisd(input_reg, scratch_reg);
j(below, &done, Label::kNear);
- Set(result_reg, Immediate(255));
+ Move(result_reg, Immediate(255));
bind(&done);
}
@@ -256,8 +256,8 @@
XMMRegister input_reg) {
Label done;
cvttsd2si(result_reg, Operand(input_reg));
- cmp(result_reg, 0x80000000u);
- j(not_equal, &done, Label::kNear);
+ cmp(result_reg, 0x1);
+ j(no_overflow, &done, Label::kNear);
sub(esp, Immediate(kDoubleSize));
movsd(MemOperand(esp, 0), input_reg);
@@ -374,8 +374,8 @@
CpuFeatureScope scope(this, SSE2);
movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
cvttsd2si(result_reg, Operand(xmm0));
- cmp(result_reg, 0x80000000u);
- j(not_equal, &done, Label::kNear);
+ cmp(result_reg, 0x1);
+ j(no_overflow, &done, Label::kNear);
// Check if the input was 0x8000000 (kMinInt).
// If no, then we got an overflow and we deoptimize.
ExternalReference min_int = ExternalReference::address_of_min_int();
@@ -715,7 +715,7 @@
#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
- Set(eax, Immediate(0));
+ Move(eax, Immediate(0));
mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
CEntryStub ces(1);
call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
@@ -729,20 +729,6 @@
}
-void MacroAssembler::Set(Register dst, const Immediate& x) {
- if (x.is_zero()) {
- xor_(dst, dst); // Shorter than mov.
- } else {
- mov(dst, x);
- }
-}
-
-
-void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
- mov(dst, x);
-}
-
-
bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
static const int kMaxImmediateBits = 17;
if (!RelocInfo::IsNone(x.rmode_)) return false;
@@ -750,12 +736,12 @@
}
-void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
+void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
- Set(dst, Immediate(x.x_ ^ jit_cookie()));
+ Move(dst, Immediate(x.x_ ^ jit_cookie()));
xor_(dst, jit_cookie());
} else {
- Set(dst, x);
+ Move(dst, x);
}
}
@@ -2258,7 +2244,7 @@
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
// smarter.
- Set(eax, Immediate(num_arguments));
+ Move(eax, Immediate(num_arguments));
mov(ebx, Immediate(ExternalReference(f, isolate())));
CEntryStub ces(1, CpuFeatures::IsSupported(SSE2) ? save_doubles
: kDontSaveFPRegs);
@@ -2283,7 +2269,7 @@
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
// smarter.
- Set(eax, Immediate(num_arguments));
+ Move(eax, Immediate(num_arguments));
JumpToExternalReference(ext);
}
@@ -2847,15 +2833,20 @@
}
-void MacroAssembler::Move(Register dst, Immediate imm) {
- if (imm.is_zero()) {
- xor_(dst, dst);
+void MacroAssembler::Move(Register dst, const Immediate& x) {
+ if (x.is_zero()) {
+ xor_(dst, dst); // Shorter than mov of 32-bit immediate 0.
} else {
- mov(dst, imm);
+ mov(dst, x);
}
}
+void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
+ mov(dst, x);
+}
+
+
void MacroAssembler::Move(XMMRegister dst, double val) {
// TODO(titzer): recognize double constants with ExternalReferences.
CpuFeatureScope scope(this, SSE2);
diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h
index bd35743..698c81f 100644
--- a/src/ia32/macro-assembler-ia32.h
+++ b/src/ia32/macro-assembler-ia32.h
@@ -287,7 +287,7 @@
if (object->IsHeapObject()) {
LoadHeapObject(result, Handle<HeapObject>::cast(object));
} else {
- Set(result, Immediate(object));
+ Move(result, Immediate(object));
}
}
@@ -350,9 +350,6 @@
void GetBuiltinEntry(Register target, Builtins::JavaScript id);
// Expression support
- void Set(Register dst, const Immediate& x);
- void Set(const Operand& dst, const Immediate& x);
-
// cvtsi2sd instruction only writes to the low 64-bit of dst register, which
// hinders register renaming and makes dependence chains longer. So we use
// xorps to clear the dst register before cvtsi2sd to solve this issue.
@@ -361,7 +358,7 @@
// Support for constant splitting.
bool IsUnsafeImmediate(const Immediate& x);
- void SafeSet(Register dst, const Immediate& x);
+ void SafeMove(Register dst, const Immediate& x);
void SafePush(const Immediate& x);
// Compare object type for heap object.
@@ -847,8 +844,9 @@
// Move if the registers are not identical.
void Move(Register target, Register source);
- // Move a constant into a register using the most efficient encoding.
- void Move(Register dst, Immediate imm);
+ // Move a constant into a destination using the most efficient encoding.
+ void Move(Register dst, const Immediate& x);
+ void Move(const Operand& dst, const Immediate& x);
// Move an immediate into an XMM register.
void Move(XMMRegister dst, double val);
diff --git a/src/ia32/regexp-macro-assembler-ia32.cc b/src/ia32/regexp-macro-assembler-ia32.cc
index d371c45..255df32 100644
--- a/src/ia32/regexp-macro-assembler-ia32.cc
+++ b/src/ia32/regexp-macro-assembler-ia32.cc
@@ -632,7 +632,7 @@
void RegExpMacroAssemblerIA32::Fail() {
STATIC_ASSERT(FAILURE == 0); // Return value for failure is zero.
if (!global()) {
- __ Set(eax, Immediate(FAILURE));
+ __ Move(eax, Immediate(FAILURE));
}
__ jmp(&exit_label_);
}
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 28f4028..1a745c7 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -283,7 +283,7 @@
__ j(not_equal, miss);
// Load its initial map. The global functions all have initial maps.
- __ Set(prototype, Immediate(Handle<Map>(function->initial_map())));
+ __ Move(prototype, Immediate(Handle<Map>(function->initial_map())));
// Load the prototype from the initial map.
__ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
diff --git a/src/interpreter-irregexp.cc b/src/interpreter-irregexp.cc
index ccabfd1..de54d0c 100644
--- a/src/interpreter-irregexp.cc
+++ b/src/interpreter-irregexp.cc
@@ -158,7 +158,7 @@
// matching terminates.
class BacktrackStack {
public:
- explicit BacktrackStack(Isolate* isolate) : isolate_(isolate) {
+ explicit BacktrackStack() {
data_ = NewArray<int>(kBacktrackStackSize);
}
@@ -174,7 +174,6 @@
static const int kBacktrackStackSize = 10000;
int* data_;
- Isolate* isolate_;
DISALLOW_COPY_AND_ASSIGN(BacktrackStack);
};
@@ -191,7 +190,7 @@
// BacktrackStack ensures that the memory allocated for the backtracking stack
// is returned to the system or cached if there is no stack being cached at
// the moment.
- BacktrackStack backtrack_stack(isolate);
+ BacktrackStack backtrack_stack;
int* backtrack_stack_base = backtrack_stack.data();
int* backtrack_sp = backtrack_stack_base;
int backtrack_stack_space = backtrack_stack.max_size();
diff --git a/src/isolate.cc b/src/isolate.cc
index 5518f00..7e06a2e 100644
--- a/src/isolate.cc
+++ b/src/isolate.cc
@@ -80,10 +80,6 @@
ThreadLocalTop::ThreadLocalTop() {
InitializeInternal();
- // This flag may be set using v8::V8::IgnoreOutOfMemoryException()
- // before an isolate is initialized. The initialize methods below do
- // not touch it to preserve its value.
- ignore_out_of_memory_ = false;
}
@@ -1273,14 +1269,8 @@
ASSERT(has_pending_exception());
PropagatePendingExceptionToExternalTryCatch();
- // If the pending exception is OutOfMemoryException set out_of_memory in
- // the native context. Note: We have to mark the native context here
- // since the GenerateThrowOutOfMemory stub cannot make a RuntimeCall to
- // set it.
HandleScope scope(this);
- if (thread_local_top_.pending_exception_->IsOutOfMemory()) {
- context()->mark_out_of_memory();
- } else if (thread_local_top_.pending_exception_ ==
+ if (thread_local_top_.pending_exception_ ==
heap()->termination_exception()) {
// Do nothing: if needed, the exception has been already propagated to
// v8::TryCatch.
@@ -1311,8 +1301,7 @@
MessageLocation Isolate::GetMessageLocation() {
ASSERT(has_pending_exception());
- if (!thread_local_top_.pending_exception_->IsOutOfMemory() &&
- thread_local_top_.pending_exception_ != heap()->termination_exception() &&
+ if (thread_local_top_.pending_exception_ != heap()->termination_exception() &&
thread_local_top_.has_pending_message_ &&
!thread_local_top_.pending_message_obj_->IsTheHole() &&
!thread_local_top_.pending_message_obj_->IsTheHole()) {
@@ -1331,39 +1320,36 @@
ASSERT(has_pending_exception());
PropagatePendingExceptionToExternalTryCatch();
- // Always reschedule out of memory exceptions.
- if (!is_out_of_memory()) {
- bool is_termination_exception =
- pending_exception() == heap_.termination_exception();
+ bool is_termination_exception =
+ pending_exception() == heap_.termination_exception();
- // Do not reschedule the exception if this is the bottom call.
- bool clear_exception = is_bottom_call;
+ // Do not reschedule the exception if this is the bottom call.
+ bool clear_exception = is_bottom_call;
- if (is_termination_exception) {
- if (is_bottom_call) {
- thread_local_top()->external_caught_exception_ = false;
- clear_pending_exception();
- return false;
- }
- } else if (thread_local_top()->external_caught_exception_) {
- // If the exception is externally caught, clear it if there are no
- // JavaScript frames on the way to the C++ frame that has the
- // external handler.
- ASSERT(thread_local_top()->try_catch_handler_address() != NULL);
- Address external_handler_address =
- thread_local_top()->try_catch_handler_address();
- JavaScriptFrameIterator it(this);
- if (it.done() || (it.frame()->sp() > external_handler_address)) {
- clear_exception = true;
- }
- }
-
- // Clear the exception if needed.
- if (clear_exception) {
+ if (is_termination_exception) {
+ if (is_bottom_call) {
thread_local_top()->external_caught_exception_ = false;
clear_pending_exception();
return false;
}
+ } else if (thread_local_top()->external_caught_exception_) {
+ // If the exception is externally caught, clear it if there are no
+ // JavaScript frames on the way to the C++ frame that has the
+ // external handler.
+ ASSERT(thread_local_top()->try_catch_handler_address() != NULL);
+ Address external_handler_address =
+ thread_local_top()->try_catch_handler_address();
+ JavaScriptFrameIterator it(this);
+ if (it.done() || (it.frame()->sp() > external_handler_address)) {
+ clear_exception = true;
+ }
+ }
+
+ // Clear the exception if needed.
+ if (clear_exception) {
+ thread_local_top()->external_caught_exception_ = false;
+ clear_pending_exception();
+ return false;
}
// Reschedule the exception.
@@ -1383,23 +1369,6 @@
}
-bool Isolate::is_out_of_memory() {
- if (has_pending_exception()) {
- MaybeObject* e = pending_exception();
- if (e->IsFailure() && Failure::cast(e)->IsOutOfMemoryException()) {
- return true;
- }
- }
- if (has_scheduled_exception()) {
- MaybeObject* e = scheduled_exception();
- if (e->IsFailure() && Failure::cast(e)->IsOutOfMemoryException()) {
- return true;
- }
- }
- return false;
-}
-
-
Handle<Context> Isolate::native_context() {
return Handle<Context>(context()->global_object()->native_context());
}
@@ -1851,9 +1820,7 @@
if (!external_caught) return;
- if (thread_local_top_.pending_exception_->IsOutOfMemory()) {
- // Do not propagate OOM exception: we should kill VM asap.
- } else if (thread_local_top_.pending_exception_ ==
+ if (thread_local_top_.pending_exception_ ==
heap()->termination_exception()) {
try_catch_handler()->can_continue_ = false;
try_catch_handler()->has_terminated_ = true;
@@ -2329,4 +2296,25 @@
#undef ISOLATE_FIELD_OFFSET
#endif
+
+Handle<JSObject> Isolate::GetSymbolRegistry() {
+ if (heap()->symbol_registry()->IsUndefined()) {
+ Handle<Map> map = factory()->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+ Handle<JSObject> registry = factory()->NewJSObjectFromMap(map);
+ heap()->set_symbol_registry(*registry);
+
+ static const char* nested[] = {
+ "for", "for_api", "for_intern", "keyFor", "private_api", "private_intern"
+ };
+ for (unsigned i = 0; i < ARRAY_SIZE(nested); ++i) {
+ Handle<String> name = factory()->InternalizeUtf8String(nested[i]);
+ Handle<JSObject> obj = factory()->NewJSObjectFromMap(map);
+ JSObject::NormalizeProperties(obj, KEEP_INOBJECT_PROPERTIES, 8);
+ JSObject::SetProperty(registry, name, obj, NONE, STRICT);
+ }
+ }
+ return Handle<JSObject>::cast(factory()->symbol_registry());
+}
+
+
} } // namespace v8::internal
diff --git a/src/isolate.h b/src/isolate.h
index 1f3361c..b471378 100644
--- a/src/isolate.h
+++ b/src/isolate.h
@@ -288,9 +288,6 @@
// Head of the list of live LookupResults.
LookupResult* top_lookup_result_;
- // Whether out of memory exceptions should be ignored.
- bool ignore_out_of_memory_;
-
private:
void InitializeInternal();
@@ -641,8 +638,7 @@
bool IsExternallyCaught();
bool is_catchable_by_javascript(MaybeObject* exception) {
- return (!exception->IsOutOfMemory()) &&
- (exception != heap()->termination_exception());
+ return exception != heap()->termination_exception();
}
// Serializer.
@@ -721,12 +717,6 @@
int frame_limit,
StackTrace::StackTraceOptions options);
- // Tells whether the current context has experienced an out of memory
- // exception.
- bool is_out_of_memory();
-
- THREAD_LOCAL_TOP_ACCESSOR(bool, ignore_out_of_memory)
-
void PrintCurrentStackTrace(FILE* out);
void PrintStack(StringStream* accumulator);
void PrintStack(FILE* out);
@@ -1126,6 +1116,9 @@
return id;
}
+ // Get (and lazily initialize) the registry for per-isolate symbols.
+ Handle<JSObject> GetSymbolRegistry();
+
private:
Isolate();
@@ -1475,17 +1468,6 @@
};
-// Tells whether the native context is marked with out of memory.
-inline bool Context::has_out_of_memory() {
- return native_context()->out_of_memory()->IsTrue();
-}
-
-
-// Mark the native context with out of memory.
-inline void Context::mark_out_of_memory() {
- native_context()->set_out_of_memory(GetIsolate()->heap()->true_value());
-}
-
class CodeTracer V8_FINAL : public Malloced {
public:
explicit CodeTracer(int isolate_id)
diff --git a/src/macros.py b/src/macros.py
index 1722c6c..0b69e6b 100644
--- a/src/macros.py
+++ b/src/macros.py
@@ -162,6 +162,7 @@
macro JSON_NUMBER_TO_STRING(arg) = ((%_IsSmi(%IS_VAR(arg)) || arg - arg == 0) ? %_NumberToString(arg) : "null");
# Private names.
+macro GLOBAL_PRIVATE(name) = (%CreateGlobalPrivateSymbol(name));
macro NEW_PRIVATE(name) = (%CreatePrivateSymbol(name));
macro IS_PRIVATE(sym) = (%SymbolIsPrivate(sym));
macro HAS_PRIVATE(obj, sym) = (sym in obj);
diff --git a/src/messages.js b/src/messages.js
index 8f9a944..a389bb8 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -788,11 +788,10 @@
// ----------------------------------------------------------------------------
// Error implementation
-//TODO(rossberg)
-var CallSiteReceiverKey = NEW_PRIVATE("receiver");
-var CallSiteFunctionKey = NEW_PRIVATE("function");
-var CallSitePositionKey = NEW_PRIVATE("position");
-var CallSiteStrictModeKey = NEW_PRIVATE("strict mode");
+var CallSiteReceiverKey = NEW_PRIVATE("CallSite#receiver");
+var CallSiteFunctionKey = NEW_PRIVATE("CallSite#function");
+var CallSitePositionKey = NEW_PRIVATE("CallSite#position");
+var CallSiteStrictModeKey = NEW_PRIVATE("CallSite#strict_mode");
function CallSite(receiver, fun, pos, strict_mode) {
SET_PRIVATE(this, CallSiteReceiverKey, receiver);
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 6d06bd9..3b8e223 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -1606,21 +1606,9 @@
}
-static void JumpIfOOM(MacroAssembler* masm,
- Register value,
- Register scratch,
- Label* oom_label) {
- STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3);
- STATIC_ASSERT(kFailureTag == 3);
- __ andi(scratch, value, 0xf);
- __ Branch(oom_label, eq, scratch, Operand(0xf));
-}
-
-
void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
Label* throw_termination_exception,
- Label* throw_out_of_memory_exception,
bool do_gc,
bool always_allocate) {
// v0: result parameter for PerformGC, if any
@@ -1723,17 +1711,11 @@
__ andi(t0, v0, ((1 << kFailureTypeTagSize) - 1) << kFailureTagSize);
__ Branch(&retry, eq, t0, Operand(zero_reg));
- // Special handling of out of memory exceptions.
- JumpIfOOM(masm, v0, t0, throw_out_of_memory_exception);
-
// Retrieve the pending exception.
__ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
isolate)));
__ lw(v0, MemOperand(t0));
- // See if we just retrieved an OOM exception.
- JumpIfOOM(masm, v0, t0, throw_out_of_memory_exception);
-
// Clear the pending exception.
__ li(a3, Operand(isolate->factory()->the_hole_value()));
__ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
@@ -1787,13 +1769,11 @@
Label throw_normal_exception;
Label throw_termination_exception;
- Label throw_out_of_memory_exception;
// Call into the runtime system.
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
false,
false);
@@ -1801,7 +1781,6 @@
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
true,
false);
@@ -1811,30 +1790,9 @@
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
true,
true);
- __ bind(&throw_out_of_memory_exception);
- // Set external caught exception to false.
- Isolate* isolate = masm->isolate();
- ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
- isolate);
- __ li(a0, Operand(false, RelocInfo::NONE32));
- __ li(a2, Operand(external_caught));
- __ sw(a0, MemOperand(a2));
-
- // Set pending exception and v0 to out of memory exception.
- Label already_have_failure;
- JumpIfOOM(masm, v0, t0, &already_have_failure);
- Failure* out_of_memory = Failure::OutOfMemoryException(0x1);
- __ li(v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
- __ bind(&already_have_failure);
- __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
- __ sw(v0, MemOperand(a2));
- // Fall through to the next label.
-
__ bind(&throw_termination_exception);
__ ThrowUncatchable(v0);
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index ccb376a..18ff8e4 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -120,6 +120,24 @@
};
+static void EmitStackCheck(MacroAssembler* masm_,
+ Register stack_limit_scratch,
+ int pointers = 0,
+ Register scratch = sp) {
+ Isolate* isolate = masm_->isolate();
+ Label ok;
+ ASSERT(scratch.is(sp) == (pointers == 0));
+ if (pointers != 0) {
+ __ Subu(scratch, sp, Operand(pointers * kPointerSize));
+ }
+ __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
+ __ Branch(&ok, hs, scratch, Operand(stack_limit_scratch));
+ PredictableCodeSizeScope predictable(masm_, 4 * Assembler::kInstrSize);
+ __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
+ __ bind(&ok);
+}
+
+
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
@@ -187,22 +205,30 @@
// Generators allocate locals, if any, in context slots.
ASSERT(!info->function()->is_generator() || locals_count == 0);
if (locals_count > 0) {
- // Emit a loop to initialize stack cells for locals when optimizing for
- // size. Otherwise, unroll the loop for maximum performance.
+ if (locals_count >= 128) {
+ EmitStackCheck(masm_, a2, locals_count, t5);
+ }
__ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
- if ((FLAG_optimize_for_size && locals_count > 4) ||
- !is_int16(locals_count)) {
- Label loop;
- __ Subu(a2, sp, Operand(locals_count * kPointerSize));
- __ bind(&loop);
- __ Subu(sp, sp, Operand(kPointerSize));
- __ Branch(&loop, gt, sp, Operand(a2), USE_DELAY_SLOT);
- __ sw(t5, MemOperand(sp, 0)); // Push in the delay slot.
- } else {
- __ Subu(sp, sp, Operand(locals_count * kPointerSize));
- for (int i = 0; i < locals_count; i++) {
+ int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
+ if (locals_count >= kMaxPushes) {
+ int loop_iterations = locals_count / kMaxPushes;
+ __ li(a2, Operand(loop_iterations));
+ Label loop_header;
+ __ bind(&loop_header);
+ // Do pushes.
+ __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
+ for (int i = 0; i < kMaxPushes; i++) {
__ sw(t5, MemOperand(sp, i * kPointerSize));
}
+ // Continue loop if not done.
+ __ Subu(a2, a2, Operand(1));
+ __ Branch(&loop_header, ne, a2, Operand(zero_reg));
+ }
+ int remaining = locals_count % kMaxPushes;
+ // Emit the remaining pushes.
+ __ Subu(sp, sp, Operand(remaining * kPointerSize));
+ for (int i = 0; i < remaining; i++) {
+ __ sw(t5, MemOperand(sp, i * kPointerSize));
}
}
}
@@ -313,11 +339,7 @@
{ Comment cmnt(masm_, "[ Stack check");
PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
- Label ok;
- __ LoadRoot(t0, Heap::kStackLimitRootIndex);
- __ Branch(&ok, hs, sp, Operand(t0));
- __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
- __ bind(&ok);
+ EmitStackCheck(masm_, at);
}
{ Comment cmnt(masm_, "[ Body");
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index b1919ba..ee70f67 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -4036,6 +4036,9 @@
instr->hydrogen()->value()->IsHeapObject()
? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
+ ASSERT(!(representation.IsSmi() &&
+ instr->value()->IsConstantOperand() &&
+ !IsSmi(LConstantOperand::cast(instr->value()))));
if (representation.IsHeapObject()) {
Register value = ToRegister(instr->value());
if (!instr->hydrogen()->value()->type().IsHeapObject()) {
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 9cc4722..b3f23e6 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -649,12 +649,6 @@
}
-bool MaybeObject::IsOutOfMemory() {
- return HAS_FAILURE_TAG(this)
- && Failure::cast(this)->IsOutOfMemoryException();
-}
-
-
bool MaybeObject::IsException() {
return this == Failure::Exception();
}
@@ -1245,11 +1239,6 @@
}
-bool Failure::IsOutOfMemoryException() const {
- return type() == OUT_OF_MEMORY_EXCEPTION;
-}
-
-
AllocationSpace Failure::allocation_space() const {
ASSERT_EQ(RETRY_AFTER_GC, type());
return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
@@ -1267,11 +1256,6 @@
}
-Failure* Failure::OutOfMemoryException(intptr_t value) {
- return Construct(OUT_OF_MEMORY_EXCEPTION, value);
-}
-
-
intptr_t Failure::value() const {
return static_cast<intptr_t>(
reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
@@ -1621,85 +1605,79 @@
}
-MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
- uint32_t count,
- EnsureElementsMode mode) {
- ElementsKind current_kind = map()->elements_kind();
+void JSObject::EnsureCanContainElements(Handle<JSObject> object,
+ Object** objects,
+ uint32_t count,
+ EnsureElementsMode mode) {
+ ElementsKind current_kind = object->map()->elements_kind();
ElementsKind target_kind = current_kind;
- ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
- bool is_holey = IsFastHoleyElementsKind(current_kind);
- if (current_kind == FAST_HOLEY_ELEMENTS) return this;
- Heap* heap = GetHeap();
- Object* the_hole = heap->the_hole_value();
- for (uint32_t i = 0; i < count; ++i) {
- Object* current = *objects++;
- if (current == the_hole) {
- is_holey = true;
- target_kind = GetHoleyElementsKind(target_kind);
- } else if (!current->IsSmi()) {
- if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
- if (IsFastSmiElementsKind(target_kind)) {
- if (is_holey) {
- target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
- } else {
- target_kind = FAST_DOUBLE_ELEMENTS;
+ {
+ DisallowHeapAllocation no_allocation;
+ ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
+ bool is_holey = IsFastHoleyElementsKind(current_kind);
+ if (current_kind == FAST_HOLEY_ELEMENTS) return;
+ Heap* heap = object->GetHeap();
+ Object* the_hole = heap->the_hole_value();
+ for (uint32_t i = 0; i < count; ++i) {
+ Object* current = *objects++;
+ if (current == the_hole) {
+ is_holey = true;
+ target_kind = GetHoleyElementsKind(target_kind);
+ } else if (!current->IsSmi()) {
+ if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
+ if (IsFastSmiElementsKind(target_kind)) {
+ if (is_holey) {
+ target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
+ } else {
+ target_kind = FAST_DOUBLE_ELEMENTS;
+ }
}
+ } else if (is_holey) {
+ target_kind = FAST_HOLEY_ELEMENTS;
+ break;
+ } else {
+ target_kind = FAST_ELEMENTS;
}
- } else if (is_holey) {
- target_kind = FAST_HOLEY_ELEMENTS;
- break;
- } else {
- target_kind = FAST_ELEMENTS;
}
}
}
-
if (target_kind != current_kind) {
- return TransitionElementsKind(target_kind);
+ TransitionElementsKind(object, target_kind);
}
- return this;
}
-// TODO(ishell): Temporary wrapper until handlified.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
Handle<FixedArrayBase> elements,
uint32_t length,
EnsureElementsMode mode) {
- CALL_HEAP_FUNCTION_VOID(object->GetIsolate(),
- object->EnsureCanContainElements(*elements,
- length,
- mode));
-}
-
-
-MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
- uint32_t length,
- EnsureElementsMode mode) {
- if (elements->map() != GetHeap()->fixed_double_array_map()) {
- ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
- elements->map() == GetHeap()->fixed_cow_array_map());
+ Heap* heap = object->GetHeap();
+ if (elements->map() != heap->fixed_double_array_map()) {
+ ASSERT(elements->map() == heap->fixed_array_map() ||
+ elements->map() == heap->fixed_cow_array_map());
if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
mode = DONT_ALLOW_DOUBLE_ELEMENTS;
}
- Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
- return EnsureCanContainElements(objects, length, mode);
+ Object** objects =
+ Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
+ EnsureCanContainElements(object, objects, length, mode);
+ return;
}
ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
- if (GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
- return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
- } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
- FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
+ if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
+ TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
+ } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
+ Handle<FixedDoubleArray> double_array =
+ Handle<FixedDoubleArray>::cast(elements);
for (uint32_t i = 0; i < length; ++i) {
if (double_array->is_the_hole(i)) {
- return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
+ TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
+ return;
}
}
- return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
+ TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
}
-
- return this;
}
diff --git a/src/objects.cc b/src/objects.cc
index 54ffcb6..c764b33 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -11818,31 +11818,16 @@
}
-// TODO(ishell): temporary wrapper until handilfied.
-// static
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
Arguments* args,
uint32_t first_arg,
uint32_t arg_count,
EnsureElementsMode mode) {
- CALL_HEAP_FUNCTION_VOID(object->GetIsolate(),
- object->EnsureCanContainElements(args,
- first_arg,
- arg_count,
- mode));
-}
-
-
-MaybeObject* JSObject::EnsureCanContainElements(Arguments* args,
- uint32_t first_arg,
- uint32_t arg_count,
- EnsureElementsMode mode) {
// Elements in |Arguments| are ordered backwards (because they're on the
// stack), but the method that's called here iterates over them in forward
// direction.
return EnsureCanContainElements(
- args->arguments() - first_arg - (arg_count - 1),
- arg_count, mode);
+ object, args->arguments() - first_arg - (arg_count - 1), arg_count, mode);
}
@@ -13936,7 +13921,7 @@
? at_least_space_for
: ComputeCapacity(at_least_space_for);
if (capacity > HashTable::kMaxCapacity) {
- return Failure::OutOfMemoryException(0x10);
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
}
Object* obj;
diff --git a/src/objects.h b/src/objects.h
index f730d2f..c4d3f25 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -934,7 +934,6 @@
public:
inline bool IsFailure();
inline bool IsRetryAfterGC();
- inline bool IsOutOfMemory();
inline bool IsException();
INLINE(bool IsTheHole());
INLINE(bool IsUninitialized());
@@ -1728,15 +1727,11 @@
inline AllocationSpace allocation_space() const;
inline bool IsInternalError() const;
- inline bool IsOutOfMemoryException() const;
static inline Failure* RetryAfterGC(AllocationSpace space);
static inline Failure* RetryAfterGC(); // NEW_SPACE
static inline Failure* Exception();
static inline Failure* InternalError();
- // TODO(jkummerow): The value is temporary instrumentation. Remove it
- // when it has served its purpose.
- static inline Failure* OutOfMemoryException(intptr_t value);
// Casting.
static inline Failure* cast(MaybeObject* object);
@@ -2420,7 +2415,8 @@
static inline void EnsureCanContainHeapObjectElements(Handle<JSObject> obj);
// Makes sure that this object can contain the specified elements.
- MUST_USE_RESULT inline MaybeObject* EnsureCanContainElements(
+ static inline void EnsureCanContainElements(
+ Handle<JSObject> object,
Object** elements,
uint32_t count,
EnsureElementsMode mode);
@@ -2429,21 +2425,12 @@
Handle<FixedArrayBase> elements,
uint32_t length,
EnsureElementsMode mode);
- MUST_USE_RESULT inline MaybeObject* EnsureCanContainElements(
- FixedArrayBase* elements,
- uint32_t length,
- EnsureElementsMode mode);
static void EnsureCanContainElements(
Handle<JSObject> object,
Arguments* arguments,
uint32_t first_arg,
uint32_t arg_count,
EnsureElementsMode mode);
- MUST_USE_RESULT MaybeObject* EnsureCanContainElements(
- Arguments* arguments,
- uint32_t first_arg,
- uint32_t arg_count,
- EnsureElementsMode mode);
// Would we convert a fast elements array to dictionary mode given
// an access at key?
diff --git a/src/parser.cc b/src/parser.cc
index 9724ec7..56eec54 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -893,7 +893,7 @@
CheckOctalLiteral(beg_pos, scanner()->location().end_pos, &ok);
}
- if (ok && FLAG_harmony_scoping && strict_mode() == STRICT) {
+ if (ok && allow_harmony_scoping() && strict_mode() == STRICT) {
CheckConflictingVarDeclarations(scope_, &ok);
}
@@ -1698,7 +1698,7 @@
// because the var declaration is hoisted to the function scope where 'x'
// is already bound.
ASSERT(IsDeclaredVariableMode(var->mode()));
- if (FLAG_harmony_scoping && strict_mode() == STRICT) {
+ if (allow_harmony_scoping() && strict_mode() == STRICT) {
// In harmony we treat re-declarations as early errors. See
// ES5 16 for a definition of early errors.
SmartArrayPointer<char> c_string = name->ToCString(DISALLOW_NULLS);
@@ -1876,7 +1876,7 @@
// In extended mode, a function behaves as a lexical binding, except in the
// global scope.
VariableMode mode =
- FLAG_harmony_scoping &&
+ allow_harmony_scoping() &&
strict_mode() == STRICT && !scope_->is_global_scope() ? LET : VAR;
VariableProxy* proxy = NewUnresolved(name, mode, Interface::NewValue());
Declaration* declaration =
@@ -1888,7 +1888,7 @@
Block* Parser::ParseBlock(ZoneStringList* labels, bool* ok) {
- if (FLAG_harmony_scoping && strict_mode() == STRICT) {
+ if (allow_harmony_scoping() && strict_mode() == STRICT) {
return ParseScopedBlock(labels, ok);
}
@@ -2016,7 +2016,7 @@
init_op = Token::INIT_CONST_LEGACY;
break;
case STRICT:
- if (FLAG_harmony_scoping) {
+ if (allow_harmony_scoping()) {
if (var_context == kStatement) {
// In strict mode 'const' declarations are only allowed in source
// element positions.
@@ -2043,7 +2043,7 @@
// contained in extended code.
//
// TODO(rossberg): make 'let' a legal identifier in sloppy mode.
- if (!FLAG_harmony_scoping || strict_mode() == SLOPPY) {
+ if (!allow_harmony_scoping() || strict_mode() == SLOPPY) {
ReportMessage("illegal_let", Vector<const char*>::empty());
*ok = false;
return NULL;
@@ -2670,7 +2670,7 @@
Target target(&this->target_stack_, &catch_collector);
VariableMode mode =
- FLAG_harmony_scoping && strict_mode() == STRICT ? LET : VAR;
+ allow_harmony_scoping() && strict_mode() == STRICT ? LET : VAR;
catch_variable =
catch_scope->DeclareLocal(name, mode, kCreatedInitialized);
@@ -3181,7 +3181,7 @@
Scope* original_declaration_scope = original_scope_->DeclarationScope();
Scope* scope =
function_type == FunctionLiteral::DECLARATION &&
- (!FLAG_harmony_scoping || strict_mode() == SLOPPY) &&
+ (!allow_harmony_scoping() || strict_mode() == SLOPPY) &&
(original_scope_ == original_declaration_scope ||
declaration_scope != original_declaration_scope)
? NewScope(declaration_scope, FUNCTION_SCOPE)
@@ -3272,11 +3272,12 @@
Variable* fvar = NULL;
Token::Value fvar_init_op = Token::INIT_CONST_LEGACY;
if (function_type == FunctionLiteral::NAMED_EXPRESSION) {
- if (FLAG_harmony_scoping && strict_mode() == STRICT) {
+ if (allow_harmony_scoping() && strict_mode() == STRICT) {
fvar_init_op = Token::INIT_CONST;
}
- VariableMode fvar_mode = FLAG_harmony_scoping && strict_mode() == STRICT
- ? CONST : CONST_LEGACY;
+ VariableMode fvar_mode =
+ allow_harmony_scoping() && strict_mode() == STRICT ? CONST
+ : CONST_LEGACY;
fvar = new(zone()) Variable(scope_,
function_name, fvar_mode, true /* is valid LHS */,
Variable::NORMAL, kCreatedInitialized, Interface::NewConst());
@@ -3498,7 +3499,7 @@
dont_optimize_reason = factory()->visitor()->dont_optimize_reason();
}
- if (FLAG_harmony_scoping && strict_mode() == STRICT) {
+ if (allow_harmony_scoping() && strict_mode() == STRICT) {
CheckConflictingVarDeclarations(scope, CHECK_OK);
}
diff --git a/src/preparser.cc b/src/preparser.cc
index 459cb6a..9bcc880 100644
--- a/src/preparser.cc
+++ b/src/preparser.cc
@@ -379,7 +379,7 @@
//
Expect(Token::LBRACE, CHECK_OK);
while (peek() != Token::RBRACE) {
- if (FLAG_harmony_scoping && strict_mode() == STRICT) {
+ if (allow_harmony_scoping() && strict_mode() == STRICT) {
ParseSourceElement(CHECK_OK);
} else {
ParseStatement(CHECK_OK);
@@ -444,7 +444,7 @@
// non-harmony semantics in sloppy mode.
Consume(Token::CONST);
if (strict_mode() == STRICT) {
- if (FLAG_harmony_scoping) {
+ if (allow_harmony_scoping()) {
if (var_context != kSourceElement && var_context != kForStatement) {
ReportMessageAt(scanner()->peek_location(), "unprotected_const");
*ok = false;
@@ -467,7 +467,7 @@
// contained in extended code.
//
// TODO(rossberg): make 'let' a legal identifier in sloppy mode.
- if (!FLAG_harmony_scoping || strict_mode() == SLOPPY) {
+ if (!allow_harmony_scoping() || strict_mode() == SLOPPY) {
ReportMessageAt(scanner()->peek_location(), "illegal_let");
*ok = false;
return Statement::Default();
@@ -850,7 +850,6 @@
// Parse function body.
ScopeType outer_scope_type = scope_->type();
- bool inside_with = scope_->inside_with();
PreParserScope function_scope(scope_, FUNCTION_SCOPE);
FunctionState function_state(&function_state_, &scope_, &function_scope);
function_state.set_is_generator(is_generator);
@@ -892,8 +891,7 @@
// See Parser::ParseFunctionLiteral for more information about lazy parsing
// and lazy compilation.
- bool is_lazily_parsed = (outer_scope_type == GLOBAL_SCOPE &&
- !inside_with && allow_lazy() &&
+ bool is_lazily_parsed = (outer_scope_type == GLOBAL_SCOPE && allow_lazy() &&
!parenthesized_function_);
parenthesized_function_ = false;
diff --git a/src/preparser.h b/src/preparser.h
index ee8e46b..080b772 100644
--- a/src/preparser.h
+++ b/src/preparser.h
@@ -678,18 +678,7 @@
public:
explicit PreParserScope(PreParserScope* outer_scope, ScopeType scope_type)
: scope_type_(scope_type) {
- if (outer_scope) {
- scope_inside_with_ = outer_scope->scope_inside_with_ || is_with_scope();
- strict_mode_ = outer_scope->strict_mode();
- } else {
- scope_inside_with_ = is_with_scope();
- strict_mode_ = SLOPPY;
- }
- }
-
- bool is_with_scope() const { return scope_type_ == WITH_SCOPE; }
- bool inside_with() const {
- return scope_inside_with_;
+ strict_mode_ = outer_scope ? outer_scope->strict_mode() : SLOPPY;
}
ScopeType type() { return scope_type_; }
@@ -698,7 +687,6 @@
private:
ScopeType scope_type_;
- bool scope_inside_with_;
StrictMode strict_mode_;
};
diff --git a/src/promise.js b/src/promise.js
index 390c19e..63f5629 100644
--- a/src/promise.js
+++ b/src/promise.js
@@ -34,25 +34,7 @@
// var $WeakMap = global.WeakMap
-var $Promise = Promise;
-
-
-//-------------------------------------------------------------------
-
-// Core functionality.
-
-// Status values: 0 = pending, +1 = resolved, -1 = rejected
-var promiseStatus = NEW_PRIVATE("Promise#status");
-var promiseValue = NEW_PRIVATE("Promise#value");
-var promiseOnResolve = NEW_PRIVATE("Promise#onResolve");
-var promiseOnReject = NEW_PRIVATE("Promise#onReject");
-var promiseRaw = NEW_PRIVATE("Promise#raw");
-
-function IsPromise(x) {
- return IS_SPEC_OBJECT(x) && %HasLocalProperty(x, promiseStatus);
-}
-
-function Promise(resolver) {
+var $Promise = function Promise(resolver) {
if (resolver === promiseRaw) return;
if (!%_IsConstructCall()) throw MakeTypeError('not_a_promise', [this]);
if (typeof resolver !== 'function')
@@ -66,6 +48,22 @@
}
}
+
+//-------------------------------------------------------------------
+
+// Core functionality.
+
+// Status values: 0 = pending, +1 = resolved, -1 = rejected
+var promiseStatus = GLOBAL_PRIVATE("Promise#status");
+var promiseValue = GLOBAL_PRIVATE("Promise#value");
+var promiseOnResolve = GLOBAL_PRIVATE("Promise#onResolve");
+var promiseOnReject = GLOBAL_PRIVATE("Promise#onReject");
+var promiseRaw = GLOBAL_PRIVATE("Promise#raw");
+
+function IsPromise(x) {
+ return IS_SPEC_OBJECT(x) && %HasLocalProperty(x, promiseStatus);
+}
+
function PromiseSet(promise, status, value, onResolve, onReject) {
SET_PRIVATE(promise, promiseStatus, status);
SET_PRIVATE(promise, promiseValue, value);
@@ -99,7 +97,7 @@
function PromiseNopResolver() {}
function PromiseCreate() {
- return new Promise(PromiseNopResolver)
+ return new $Promise(PromiseNopResolver)
}
@@ -108,7 +106,7 @@
function PromiseDeferred() {
if (this === $Promise) {
// Optimized case, avoid extra closure.
- var promise = PromiseInit(new Promise(promiseRaw));
+ var promise = PromiseInit(new $Promise(promiseRaw));
return {
promise: promise,
resolve: function(x) { PromiseResolve(promise, x) },
@@ -127,7 +125,7 @@
function PromiseResolved(x) {
if (this === $Promise) {
// Optimized case, avoid extra closure.
- return PromiseSet(new Promise(promiseRaw), +1, x);
+ return PromiseSet(new $Promise(promiseRaw), +1, x);
} else {
return new this(function(resolve, reject) { resolve(x) });
}
@@ -136,7 +134,7 @@
function PromiseRejected(r) {
if (this === $Promise) {
// Optimized case, avoid extra closure.
- return PromiseSet(new Promise(promiseRaw), -1, r);
+ return PromiseSet(new $Promise(promiseRaw), -1, r);
} else {
return new this(function(resolve, reject) { reject(r) });
}
@@ -306,9 +304,8 @@
//-------------------------------------------------------------------
function SetUpPromise() {
- %CheckIsBootstrapping()
- var global_receiver = %GlobalReceiver(global);
- global_receiver.Promise = $Promise;
+ %CheckIsBootstrapping();
+ %SetProperty(global, "Promise", $Promise, NONE);
InstallFunctions($Promise, DONT_ENUM, [
"defer", PromiseDeferred,
"accept", PromiseResolved,
diff --git a/src/runtime.cc b/src/runtime.cc
index b4c34ef..8d98f27 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -621,6 +621,25 @@
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateGlobalPrivateSymbol) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 0);
+ Handle<JSObject> registry = isolate->GetSymbolRegistry();
+ Handle<String> part = isolate->factory()->private_intern_string();
+ Handle<JSObject> privates =
+ Handle<JSObject>::cast(JSObject::GetProperty(registry, part));
+ Handle<Object> symbol = JSObject::GetProperty(privates, name);
+ if (!symbol->IsSymbol()) {
+ ASSERT(symbol->IsUndefined());
+ symbol = isolate->factory()->NewPrivateSymbol();
+ Handle<Symbol>::cast(symbol)->set_name(*name);
+ JSObject::SetProperty(privates, name, symbol, NONE, STRICT);
+ }
+ return *symbol;
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_NewSymbolWrapper) {
ASSERT(args.length() == 1);
CONVERT_ARG_CHECKED(Symbol, symbol, 0);
@@ -637,9 +656,9 @@
RUNTIME_FUNCTION(MaybeObject*, Runtime_SymbolRegistry) {
- SealHandleScope shs(isolate);
+ HandleScope scope(isolate);
ASSERT(args.length() == 0);
- return isolate->heap()->symbol_registry();
+ return *isolate->GetSymbolRegistry();
}
@@ -3913,7 +3932,9 @@
static_cast<int64_t>(pattern_len)) *
static_cast<int64_t>(matches) +
static_cast<int64_t>(subject_len);
- if (result_len_64 > INT_MAX) return Failure::OutOfMemoryException(0x11);
+ if (result_len_64 > INT_MAX) {
+ v8::internal::Heap::FatalProcessOutOfMemory("invalid string length", true);
+ }
int result_len = static_cast<int>(result_len_64);
int subject_pos = 0;
@@ -6310,6 +6331,15 @@
}
+static inline bool ToUpperOverflows(uc32 character) {
+ // y with umlauts and the micro sign are the only characters that stop
+ // fitting into one-byte when converting to uppercase.
+ static const uc32 yuml_code = 0xff;
+ static const uc32 micro_code = 0xb5;
+ return (character == yuml_code || character == micro_code);
+}
+
+
template <class Converter>
MUST_USE_RESULT static MaybeObject* ConvertCaseHelper(
Isolate* isolate,
@@ -6337,10 +6367,7 @@
unibrow::uchar chars[Converter::kMaxWidth];
// We can assume that the string is not empty
uc32 current = stream.GetNext();
- // y with umlauts is the only character that stops fitting into one-byte
- // when converting to uppercase.
- static const uc32 yuml_code = 0xff;
- bool ignore_yuml = result->IsSeqTwoByteString() || Converter::kIsToLower;
+ bool ignore_overflow = Converter::kIsToLower || result->IsSeqTwoByteString();
for (int i = 0; i < result_length;) {
bool has_next = stream.HasMore();
uc32 next = has_next ? stream.GetNext() : 0;
@@ -6349,14 +6376,15 @@
// The case conversion of this character is the character itself.
result->Set(i, current);
i++;
- } else if (char_length == 1 && (ignore_yuml || current != yuml_code)) {
+ } else if (char_length == 1 &&
+ (ignore_overflow || !ToUpperOverflows(current))) {
// Common case: converting the letter resulted in one character.
ASSERT(static_cast<uc32>(chars[0]) != current);
result->Set(i, chars[0]);
has_changed_character = true;
i++;
} else if (result_length == string->length()) {
- bool found_yuml = (current == yuml_code);
+ bool overflows = ToUpperOverflows(current);
// We've assumed that the result would be as long as the
// input but here is a character that converts to several
// characters. No matter, we calculate the exact length
@@ -6376,7 +6404,7 @@
int current_length = i + char_length + next_length;
while (stream.HasMore()) {
current = stream.GetNext();
- found_yuml |= (current == yuml_code);
+ overflows |= ToUpperOverflows(current);
// NOTE: we use 0 as the next character here because, while
// the next character may affect what a character converts to,
// it does not in any case affect the length of what it convert
@@ -6390,9 +6418,9 @@
}
}
// Try again with the real length. Return signed if we need
- // to allocate a two-byte string for y-umlaut to uppercase.
- return (found_yuml && !ignore_yuml) ? Smi::FromInt(-current_length)
- : Smi::FromInt(current_length);
+ // to allocate a two-byte string for to uppercase.
+ return (overflows && !ignore_overflow) ? Smi::FromInt(-current_length)
+ : Smi::FromInt(current_length);
} else {
for (int j = 0; j < char_length; j++) {
result->Set(i, chars[j]);
@@ -11407,6 +11435,14 @@
}
+static bool ParameterIsShadowedByContextLocal(Handle<ScopeInfo> info,
+ int index) {
+ VariableMode mode;
+ InitializationFlag flag;
+ return info->ContextSlotIndex(info->ParameterName(index), &mode, &flag) != -1;
+}
+
+
// Create a plain JSObject which materializes the local scope for the specified
// frame.
static Handle<JSObject> MaterializeStackLocalsWithFrameInspector(
@@ -11419,17 +11455,16 @@
// First fill all parameters.
for (int i = 0; i < scope_info->ParameterCount(); ++i) {
- Handle<String> name(scope_info->ParameterName(i));
- VariableMode mode;
- InitializationFlag init_flag;
// Do not materialize the parameter if it is shadowed by a context local.
- if (scope_info->ContextSlotIndex(*name, &mode, &init_flag) != -1) continue;
+ if (ParameterIsShadowedByContextLocal(scope_info, i)) continue;
+ HandleScope scope(isolate);
Handle<Object> value(i < frame_inspector->GetParametersCount()
? frame_inspector->GetParameter(i)
: isolate->heap()->undefined_value(),
isolate);
ASSERT(!value->IsTheHole());
+ Handle<String> name(scope_info->ParameterName(i));
RETURN_IF_EMPTY_HANDLE_VALUE(
isolate,
@@ -11470,10 +11505,13 @@
// Parameters.
for (int i = 0; i < scope_info->ParameterCount(); ++i) {
+ // Shadowed parameters were not materialized.
+ if (ParameterIsShadowedByContextLocal(scope_info, i)) continue;
+
ASSERT(!frame->GetParameter(i)->IsTheHole());
HandleScope scope(isolate);
- Handle<Object> value = GetProperty(
- isolate, target, Handle<String>(scope_info->ParameterName(i)));
+ Handle<String> name(scope_info->ParameterName(i));
+ Handle<Object> value = GetProperty(isolate, target, name);
frame->SetParameterValue(i, *value);
}
diff --git a/src/runtime.h b/src/runtime.h
index aa811a6..e66403e 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -111,7 +111,6 @@
F(FlattenString, 1, 1) \
F(TryMigrateInstance, 1, 1) \
F(NotifyContextDisposed, 0, 1) \
- F(MaxSmi, 0, 1) \
\
/* Array join support */ \
F(PushIfAbsent, 2, 1) \
@@ -312,6 +311,7 @@
/* Harmony symbols */ \
F(CreateSymbol, 1, 1) \
F(CreatePrivateSymbol, 1, 1) \
+ F(CreateGlobalPrivateSymbol, 1, 1) \
F(NewSymbolWrapper, 1, 1) \
F(SymbolDescription, 1, 1) \
F(SymbolRegistry, 0, 1) \
@@ -368,7 +368,6 @@
F(ArrayBufferIsView, 1, 1) \
F(ArrayBufferNeuter, 1, 1) \
\
- F(TypedArrayInitialize, 5, 1) \
F(TypedArrayInitializeFromArrayLike, 4, 1) \
F(TypedArrayGetBuffer, 1, 1) \
F(TypedArrayGetByteLength, 1, 1) \
@@ -376,7 +375,6 @@
F(TypedArrayGetLength, 1, 1) \
F(TypedArraySetFastCases, 3, 1) \
\
- F(DataViewInitialize, 4, 1) \
F(DataViewGetBuffer, 1, 1) \
F(DataViewGetByteLength, 1, 1) \
F(DataViewGetByteOffset, 1, 1) \
@@ -662,7 +660,10 @@
F(NumberToString, 1, 1) \
F(DoubleHi, 1, 1) \
F(DoubleLo, 1, 1) \
- F(ConstructDouble, 2, 1)
+ F(ConstructDouble, 2, 1) \
+ F(TypedArrayInitialize, 5, 1) \
+ F(DataViewInitialize, 4, 1) \
+ F(MaxSmi, 0, 1)
//---------------------------------------------------------------------------
diff --git a/src/sampler.cc b/src/sampler.cc
index fef3f28..c6830e6 100644
--- a/src/sampler.cc
+++ b/src/sampler.cc
@@ -159,6 +159,23 @@
// Other fields are not used by V8, don't define them here.
} ucontext_t;
enum { REG_EBP = 6, REG_ESP = 7, REG_EIP = 14 };
+
+#elif defined(__x86_64__)
+// x64 version for Android.
+typedef struct {
+ uint64_t gregs[23];
+ void* fpregs;
+ uint64_t __reserved1[8];
+} mcontext_t;
+
+typedef struct ucontext {
+ uint64_t uc_flags;
+ struct ucontext *uc_link;
+ stack_t uc_stack;
+ mcontext_t uc_mcontext;
+ // Other fields are not used by V8, don't define them here.
+} ucontext_t;
+enum { REG_RBP = 10, REG_RSP = 15, REG_RIP = 16 };
#endif
#endif // V8_OS_ANDROID && !defined(__BIONIC_HAVE_UCONTEXT_T)
diff --git a/src/symbol.js b/src/symbol.js
index 49f9096..e7ea5a6 100644
--- a/src/symbol.js
+++ b/src/symbol.js
@@ -63,30 +63,19 @@
}
-function GetSymbolRegistry() {
- var registry = %SymbolRegistry();
- if (!('internal' in registry)) {
- registry.internal = {__proto__: null};
- registry.for = {__proto__: null};
- registry.keyFor = {__proto__: null};
- }
- return registry;
-}
-
-
function InternalSymbol(key) {
- var registry = GetSymbolRegistry();
- if (!(key in registry.internal)) {
- registry.internal[key] = %CreateSymbol(key);
+ var internal_registry = %SymbolRegistry().for_intern;
+ if (IS_UNDEFINED(internal_registry[key])) {
+ internal_registry[key] = %CreateSymbol(key);
}
- return registry.internal[key];
+ return internal_registry[key];
}
function SymbolFor(key) {
key = TO_STRING_INLINE(key);
- var registry = GetSymbolRegistry();
- if (!(key in registry.for)) {
+ var registry = %SymbolRegistry();
+ if (IS_UNDEFINED(registry.for[key])) {
var symbol = %CreateSymbol(key);
registry.for[key] = symbol;
registry.keyFor[symbol] = key;
@@ -96,10 +85,8 @@
function SymbolKeyFor(symbol) {
- if (!IS_SYMBOL(symbol)) {
- throw MakeTypeError("not_a_symbol", [symbol]);
- }
- return GetSymbolRegistry().keyFor[symbol];
+ if (!IS_SYMBOL(symbol)) throw MakeTypeError("not_a_symbol", [symbol]);
+ return %SymbolRegistry().keyFor[symbol];
}
@@ -118,13 +105,13 @@
//-------------------------------------------------------------------
-var symbolCreate = InternalSymbol("@@create");
-var symbolHasInstance = InternalSymbol("@@hasInstance");
-var symbolIsConcatSpreadable = InternalSymbol("@@isConcatSpreadable");
-var symbolIsRegExp = InternalSymbol("@@isRegExp");
-var symbolIterator = InternalSymbol("@@iterator");
-var symbolToStringTag = InternalSymbol("@@toStringTag");
-var symbolUnscopables = InternalSymbol("@@unscopables");
+var symbolCreate = InternalSymbol("Symbol.create");
+var symbolHasInstance = InternalSymbol("Symbol.hasInstance");
+var symbolIsConcatSpreadable = InternalSymbol("Symbol.isConcatSpreadable");
+var symbolIsRegExp = InternalSymbol("Symbol.isRegExp");
+var symbolIterator = InternalSymbol("Symbol.iterator");
+var symbolToStringTag = InternalSymbol("Symbol.toStringTag");
+var symbolUnscopables = InternalSymbol("Symbol.unscopables");
//-------------------------------------------------------------------
@@ -135,14 +122,15 @@
%SetCode($Symbol, SymbolConstructor);
%FunctionSetPrototype($Symbol, new $Object());
- %SetProperty($Symbol, "create", symbolCreate, DONT_ENUM);
- %SetProperty($Symbol, "hasInstance", symbolHasInstance, DONT_ENUM);
- %SetProperty($Symbol, "isConcatSpreadable",
- symbolIsConcatSpreadable, DONT_ENUM);
- %SetProperty($Symbol, "isRegExp", symbolIsRegExp, DONT_ENUM);
- %SetProperty($Symbol, "iterator", symbolIterator, DONT_ENUM);
- %SetProperty($Symbol, "toStringTag", symbolToStringTag, DONT_ENUM);
- %SetProperty($Symbol, "unscopables", symbolUnscopables, DONT_ENUM);
+ InstallConstants($Symbol, $Array(
+ "create", symbolCreate,
+ "hasInstance", symbolHasInstance,
+ "isConcatSpreadable", symbolIsConcatSpreadable,
+ "isRegExp", symbolIsRegExp,
+ "iterator", symbolIterator,
+ "toStringTag", symbolToStringTag,
+ "unscopables", symbolUnscopables
+ ));
InstallFunctions($Symbol, DONT_ENUM, $Array(
"for", SymbolFor,
"keyFor", SymbolKeyFor
diff --git a/src/typedarray.js b/src/typedarray.js
index cba8993..a7a6d87 100644
--- a/src/typedarray.js
+++ b/src/typedarray.js
@@ -87,28 +87,28 @@
newByteLength = newLength * ELEMENT_SIZE;
}
if ((offset + newByteLength > bufferByteLength)
- || (newLength > %MaxSmi())) {
+ || (newLength > %_MaxSmi())) {
throw MakeRangeError("invalid_typed_array_length");
}
- %TypedArrayInitialize(obj, ARRAY_ID, buffer, offset, newByteLength);
+ %_TypedArrayInitialize(obj, ARRAY_ID, buffer, offset, newByteLength);
}
function NAMEConstructByLength(obj, length) {
var l = IS_UNDEFINED(length) ?
0 : ToPositiveInteger(length, "invalid_typed_array_length");
- if (l > %MaxSmi()) {
+ if (l > %_MaxSmi()) {
throw MakeRangeError("invalid_typed_array_length");
}
var byteLength = l * ELEMENT_SIZE;
var buffer = new $ArrayBuffer(byteLength);
- %TypedArrayInitialize(obj, ARRAY_ID, buffer, 0, byteLength);
+ %_TypedArrayInitialize(obj, ARRAY_ID, buffer, 0, byteLength);
}
function NAMEConstructByArrayLike(obj, arrayLike) {
var length = arrayLike.length;
var l = ToPositiveInteger(length, "invalid_typed_array_length");
- if (l > %MaxSmi()) {
+ if (l > %_MaxSmi()) {
throw MakeRangeError("invalid_typed_array_length");
}
if(!%TypedArrayInitializeFromArrayLike(obj, ARRAY_ID, arrayLike, l)) {
@@ -350,7 +350,7 @@
if (length < 0 || offset + length > bufferByteLength) {
throw new MakeRangeError('invalid_data_view_length');
}
- %DataViewInitialize(this, buffer, offset, length);
+ %_DataViewInitialize(this, buffer, offset, length);
} else {
throw MakeTypeError('constructor_not_function', ["DataView"]);
}
diff --git a/src/version.cc b/src/version.cc
index 59ac193..3060acc 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
// system so their names cannot be changed without changing the scripts.
#define MAJOR_VERSION 3
#define MINOR_VERSION 25
-#define BUILD_NUMBER 23
+#define BUILD_NUMBER 24
#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index ce820b1..17bdc42 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -709,8 +709,8 @@
__ bind(&try_arithmetic_simplification);
__ cvttsd2si(exponent, double_exponent);
// Skip to runtime if possibly NaN (indicated by the indefinite integer).
- __ cmpl(exponent, Immediate(0x80000000u));
- __ j(equal, &call_runtime);
+ __ cmpl(exponent, Immediate(0x1));
+ __ j(overflow, &call_runtime);
if (exponent_type_ == ON_STACK) {
// Detect square root case. Crankshaft detects constant +/-0.5 at
@@ -2407,23 +2407,9 @@
}
-static void JumpIfOOM(MacroAssembler* masm,
- Register value,
- Register scratch,
- Label* oom_label) {
- __ movp(scratch, value);
- STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3);
- STATIC_ASSERT(kFailureTag == 3);
- __ and_(scratch, Immediate(0xf));
- __ cmpq(scratch, Immediate(0xf));
- __ j(equal, oom_label);
-}
-
-
void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
Label* throw_termination_exception,
- Label* throw_out_of_memory_exception,
bool do_gc,
bool always_allocate_scope) {
// rax: result parameter for PerformGC, if any.
@@ -2530,9 +2516,6 @@
__ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
__ j(zero, &retry, Label::kNear);
- // Special handling of out of memory exceptions.
- JumpIfOOM(masm, rax, kScratchRegister, throw_out_of_memory_exception);
-
// Retrieve the pending exception.
ExternalReference pending_exception_address(
Isolate::kPendingExceptionAddress, masm->isolate());
@@ -2540,9 +2523,6 @@
masm->ExternalOperand(pending_exception_address);
__ movp(rax, pending_exception_operand);
- // See if we just retrieved an OOM exception.
- JumpIfOOM(masm, rax, kScratchRegister, throw_out_of_memory_exception);
-
// Clear the pending exception.
pending_exception_operand =
masm->ExternalOperand(pending_exception_address);
@@ -2598,13 +2578,11 @@
Label throw_normal_exception;
Label throw_termination_exception;
- Label throw_out_of_memory_exception;
// Call into the runtime system.
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
false,
false);
@@ -2612,7 +2590,6 @@
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
true,
false);
@@ -2622,28 +2599,9 @@
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
- &throw_out_of_memory_exception,
true,
true);
- __ bind(&throw_out_of_memory_exception);
- // Set external caught exception to false.
- Isolate* isolate = masm->isolate();
- ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
- isolate);
- __ Set(rax, static_cast<int64_t>(false));
- __ Store(external_caught, rax);
-
- // Set pending exception and rax to out of memory exception.
- ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
- isolate);
- Label already_have_failure;
- JumpIfOOM(masm, rax, kScratchRegister, &already_have_failure);
- __ Move(rax, Failure::OutOfMemoryException(0x1), Assembler::RelocInfoNone());
- __ bind(&already_have_failure);
- __ Store(pending_exception, rax);
- // Fall through to the next label.
-
__ bind(&throw_termination_exception);
__ ThrowUncatchable(rax);
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 137622e..8eb6092 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -101,6 +101,23 @@
};
+static void EmitStackCheck(MacroAssembler* masm_,
+ int pointers = 0,
+ Register scratch = rsp) {
+ Isolate* isolate = masm_->isolate();
+ Label ok;
+ ASSERT(scratch.is(rsp) == (pointers == 0));
+ if (pointers != 0) {
+ __ movq(scratch, rsp);
+ __ subq(scratch, Immediate(pointers * kPointerSize));
+ }
+ __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
+ __ j(above_equal, &ok, Label::kNear);
+ __ call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
+ __ bind(&ok);
+}
+
+
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
@@ -171,8 +188,27 @@
if (locals_count == 1) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
} else if (locals_count > 1) {
+ if (locals_count >= 128) {
+ EmitStackCheck(masm_, locals_count, rcx);
+ }
__ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
- for (int i = 0; i < locals_count; i++) {
+ const int kMaxPushes = 32;
+ if (locals_count >= kMaxPushes) {
+ int loop_iterations = locals_count / kMaxPushes;
+ __ movq(rcx, Immediate(loop_iterations));
+ Label loop_header;
+ __ bind(&loop_header);
+ // Do pushes.
+ for (int i = 0; i < kMaxPushes; i++) {
+ __ Push(rdx);
+ }
+ // Continue loop if not done.
+ __ decq(rcx);
+ __ j(not_zero, &loop_header, Label::kNear);
+ }
+ int remaining = locals_count % kMaxPushes;
+ // Emit the remaining pushes.
+ for (int i = 0; i < remaining; i++) {
__ Push(rdx);
}
}
@@ -284,11 +320,7 @@
{ Comment cmnt(masm_, "[ Stack check");
PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
- Label ok;
- __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
- __ j(above_equal, &ok, Label::kNear);
- __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
- __ bind(&ok);
+ EmitStackCheck(masm_);
}
{ Comment cmnt(masm_, "[ Body");
@@ -3084,8 +3116,8 @@
Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
__ CheckMap(rax, map, if_false, DO_SMI_CHECK);
__ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset),
- Immediate(0x80000000));
- __ j(not_equal, if_false);
+ Immediate(0x1));
+ __ j(no_overflow, if_false);
__ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset),
Immediate(0x00000000));
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 5849cf4..a6be271 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -409,20 +409,12 @@
bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const {
- return op->IsConstantOperand() &&
- chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
+ return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
}
bool LCodeGen::IsSmiConstant(LConstantOperand* op) const {
- return op->IsConstantOperand() &&
- chunk_->LookupLiteralRepresentation(op).IsSmi();
-}
-
-
-bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const {
- return op->IsConstantOperand() &&
- chunk_->LookupLiteralRepresentation(op).IsTagged();
+ return chunk_->LookupLiteralRepresentation(op).IsSmi();
}
@@ -2261,8 +2253,8 @@
Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
__ CheckMap(value, map, instr->FalseLabel(chunk()), DO_SMI_CHECK);
__ cmpl(FieldOperand(value, HeapNumber::kExponentOffset),
- Immediate(0x80000000));
- EmitFalseBranch(instr, not_equal);
+ Immediate(0x1));
+ EmitFalseBranch(instr, no_overflow);
__ cmpl(FieldOperand(value, HeapNumber::kMantissaOffset),
Immediate(0x00000000));
EmitBranch(instr, equal);
@@ -3577,8 +3569,8 @@
}
__ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
__ cvttsd2si(output_reg, xmm_scratch);
- __ cmpl(output_reg, Immediate(0x80000000));
- DeoptimizeIf(equal, instr->environment());
+ __ cmpl(output_reg, Immediate(0x1));
+ DeoptimizeIf(overflow, instr->environment());
} else {
Label negative_sign, done;
// Deoptimize on unordered.
@@ -3602,8 +3594,8 @@
// Use truncating instruction (OK because input is positive).
__ cvttsd2si(output_reg, input_reg);
// Overflow is signalled with minint.
- __ cmpl(output_reg, Immediate(0x80000000));
- DeoptimizeIf(equal, instr->environment());
+ __ cmpl(output_reg, Immediate(0x1));
+ DeoptimizeIf(overflow, instr->environment());
__ jmp(&done, Label::kNear);
// Non-zero negative reaches here.
@@ -3640,9 +3632,9 @@
__ addsd(xmm_scratch, input_reg);
__ cvttsd2si(output_reg, xmm_scratch);
// Overflow is signalled with minint.
- __ cmpl(output_reg, Immediate(0x80000000));
+ __ cmpl(output_reg, Immediate(0x1));
__ RecordComment("D2I conversion overflow");
- DeoptimizeIf(equal, instr->environment());
+ DeoptimizeIf(overflow, instr->environment());
__ jmp(&done, dist);
__ bind(&below_one_half);
@@ -3657,9 +3649,9 @@
__ subsd(input_temp, xmm_scratch);
__ cvttsd2si(output_reg, input_temp);
// Catch minint due to overflow, and to prevent overflow when compensating.
- __ cmpl(output_reg, Immediate(0x80000000));
+ __ cmpl(output_reg, Immediate(0x1));
__ RecordComment("D2I conversion overflow");
- DeoptimizeIf(equal, instr->environment());
+ DeoptimizeIf(overflow, instr->environment());
__ Cvtlsi2sd(xmm_scratch, output_reg);
__ ucomisd(xmm_scratch, input_temp);
@@ -3955,18 +3947,13 @@
SmiCheck check_needed = hinstr->value()->IsHeapObject()
? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
- if (representation.IsSmi()) {
+ ASSERT(!(representation.IsSmi() &&
+ instr->value()->IsConstantOperand() &&
+ !IsInteger32Constant(LConstantOperand::cast(instr->value()))));
+ if (representation.IsHeapObject()) {
if (instr->value()->IsConstantOperand()) {
LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
- if (!IsInteger32Constant(operand_value) &&
- !IsSmiConstant(operand_value)) {
- DeoptimizeIf(no_condition, instr->environment());
- }
- }
- } else if (representation.IsHeapObject()) {
- if (instr->value()->IsConstantOperand()) {
- LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
- if (IsInteger32Constant(operand_value)) {
+ if (chunk_->LookupConstant(operand_value)->HasSmiValue()) {
DeoptimizeIf(no_condition, instr->environment());
}
} else {
@@ -4691,8 +4678,8 @@
Register output = ToRegister(instr->result());
if (hchange->CheckFlag(HValue::kCanOverflow) &&
hchange->value()->CheckFlag(HValue::kUint32)) {
- __ testl(input, Immediate(0x80000000));
- DeoptimizeIf(not_zero, instr->environment());
+ __ testl(input, input);
+ DeoptimizeIf(sign, instr->environment());
}
__ Integer32ToSmi(output, input);
if (hchange->CheckFlag(HValue::kCanOverflow) &&
@@ -5087,7 +5074,7 @@
// conversions.
__ Cmp(input_reg, factory()->undefined_value());
DeoptimizeIf(not_equal, instr->environment());
- __ movp(input_reg, Immediate(0));
+ __ xorl(input_reg, input_reg);
__ jmp(&done, Label::kNear);
// Heap number
diff --git a/src/x64/lithium-codegen-x64.h b/src/x64/lithium-codegen-x64.h
index e2c5a0a..4f60644 100644
--- a/src/x64/lithium-codegen-x64.h
+++ b/src/x64/lithium-codegen-x64.h
@@ -91,7 +91,6 @@
Smi* ToSmi(LConstantOperand* op) const;
double ToDouble(LConstantOperand* op) const;
ExternalReference ToExternalReference(LConstantOperand* op) const;
- bool IsTaggedConstant(LConstantOperand* op) const;
Handle<Object> ToHandle(LConstantOperand* op) const;
Operand ToOperand(LOperand* op) const;
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index e5b4160..7b17454 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -3154,10 +3154,10 @@
cvtsd2si(result_reg, input_reg);
testl(result_reg, Immediate(0xFFFFFF00));
j(zero, &done, Label::kNear);
- cmpl(result_reg, Immediate(0x80000000));
- j(equal, &conv_failure, Label::kNear);
+ cmpl(result_reg, Immediate(1));
+ j(overflow, &conv_failure, Label::kNear);
movl(result_reg, Immediate(0));
- setcc(above, result_reg);
+ setcc(sign, result_reg);
subl(result_reg, Immediate(1));
andl(result_reg, Immediate(255));
jmp(&done, Label::kNear);
@@ -3194,9 +3194,8 @@
Label done;
movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
cvttsd2siq(result_reg, xmm0);
- Set(kScratchRegister, V8_UINT64_C(0x8000000000000000));
- cmpq(result_reg, kScratchRegister);
- j(not_equal, &done, Label::kNear);
+ cmpq(result_reg, Immediate(1));
+ j(no_overflow, &done, Label::kNear);
// Slow case.
if (input_reg.is(result_reg)) {
@@ -3216,9 +3215,8 @@
XMMRegister input_reg) {
Label done;
cvttsd2siq(result_reg, input_reg);
- movq(kScratchRegister, V8_INT64_C(0x8000000000000000));
- cmpq(result_reg, kScratchRegister);
- j(not_equal, &done, Label::kNear);
+ cmpq(result_reg, Immediate(1));
+ j(no_overflow, &done, Label::kNear);
subp(rsp, Immediate(kDoubleSize));
movsd(MemOperand(rsp, 0), input_reg);
diff --git a/test/cctest/cctest.status b/test/cctest/cctest.status
index 7e5fe2a..182a35b 100644
--- a/test/cctest/cctest.status
+++ b/test/cctest/cctest.status
@@ -89,9 +89,6 @@
# BUG(v8:3155).
'test-strings/AsciiArrayJoin': [PASS, ['mode == debug', FAIL]],
-
- # BUG(v8:3226).
- 'test-assembler-arm64/fmadd_fmsub_float_nans': [PASS, FAIL]
}], # 'arch == arm64'
['arch == arm64 and simulator_run == True', {
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index a1b05f9..ffc8652 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -2757,7 +2757,8 @@
v8::Local<v8::Object> obj = v8::Object::New(isolate);
v8::Local<v8::Symbol> sym1 = v8::Symbol::New(isolate);
- v8::Local<v8::Symbol> sym2 = v8::Symbol::New(isolate, "my-symbol");
+ v8::Local<v8::Symbol> sym2 =
+ v8::Symbol::New(isolate, v8_str("my-symbol"));
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
@@ -2775,7 +2776,7 @@
CHECK(!sym1->StrictEquals(sym2));
CHECK(!sym2->StrictEquals(sym1));
- CHECK(sym2->Name()->Equals(v8::String::NewFromUtf8(isolate, "my-symbol")));
+ CHECK(sym2->Name()->Equals(v8_str("my-symbol")));
v8::Local<v8::Value> sym_val = sym2;
CHECK(sym_val->IsSymbol());
@@ -2845,8 +2846,8 @@
v8::Local<v8::Object> obj = v8::Object::New(isolate);
v8::Local<v8::Private> priv1 = v8::Private::New(isolate);
- v8::Local<v8::Private> priv2 = v8::Private::New(isolate,
- v8_str("my-private"));
+ v8::Local<v8::Private> priv2 =
+ v8::Private::New(isolate, v8_str("my-private"));
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
@@ -2897,6 +2898,55 @@
}
+THREADED_TEST(GlobalSymbols) {
+ i::FLAG_harmony_symbols = true;
+
+ LocalContext env;
+ v8::Isolate* isolate = env->GetIsolate();
+ v8::HandleScope scope(isolate);
+
+ v8::Local<String> name = v8_str("my-symbol");
+ v8::Local<v8::Symbol> glob = v8::Symbol::For(isolate, name);
+ v8::Local<v8::Symbol> glob2 = v8::Symbol::For(isolate, name);
+ CHECK(glob2->SameValue(glob));
+
+ v8::Local<v8::Symbol> glob_api = v8::Symbol::ForApi(isolate, name);
+ v8::Local<v8::Symbol> glob_api2 = v8::Symbol::ForApi(isolate, name);
+ CHECK(glob_api2->SameValue(glob_api));
+ CHECK(!glob_api->SameValue(glob));
+
+ v8::Local<v8::Symbol> sym = v8::Symbol::New(isolate, name);
+ CHECK(!sym->SameValue(glob));
+
+ CompileRun("var sym2 = Symbol.for('my-symbol')");
+ v8::Local<Value> sym2 = env->Global()->Get(v8_str("sym2"));
+ CHECK(sym2->SameValue(glob));
+ CHECK(!sym2->SameValue(glob_api));
+}
+
+
+THREADED_TEST(GlobalPrivates) {
+ LocalContext env;
+ v8::Isolate* isolate = env->GetIsolate();
+ v8::HandleScope scope(isolate);
+
+ v8::Local<String> name = v8_str("my-private");
+ v8::Local<v8::Private> glob = v8::Private::ForApi(isolate, name);
+ v8::Local<v8::Object> obj = v8::Object::New(isolate);
+ CHECK(obj->SetPrivate(glob, v8::Integer::New(isolate, 3)));
+
+ v8::Local<v8::Private> glob2 = v8::Private::ForApi(isolate, name);
+ CHECK(obj->HasPrivate(glob2));
+
+ v8::Local<v8::Private> priv = v8::Private::New(isolate, name);
+ CHECK(!obj->HasPrivate(priv));
+
+ CompileRun("var intern = %CreateGlobalPrivateSymbol('my-private')");
+ v8::Local<Value> intern = env->Global()->Get(v8_str("intern"));
+ CHECK(!obj->Has(intern));
+}
+
+
class ScopedArrayBufferContents {
public:
explicit ScopedArrayBufferContents(
@@ -10189,10 +10239,11 @@
Local<v8::Object> o2 = t2->GetFunction()->NewInstance();
CHECK(o2->SetPrototype(o1));
- v8::Local<v8::Symbol> sym = v8::Symbol::New(context->GetIsolate(), "s1");
+ v8::Local<v8::Symbol> sym =
+ v8::Symbol::New(context->GetIsolate(), v8_str("s1"));
o1->Set(sym, v8_num(3));
- o1->SetHiddenValue(v8_str("h1"),
- v8::Integer::New(context->GetIsolate(), 2013));
+ o1->SetHiddenValue(
+ v8_str("h1"), v8::Integer::New(context->GetIsolate(), 2013));
// Call the runtime version of GetLocalPropertyNames() on
// the natively created object through JavaScript.
@@ -19332,7 +19383,6 @@
class InitDefaultIsolateThread : public v8::internal::Thread {
public:
enum TestCase {
- IgnoreOOM,
SetResourceConstraints,
SetFatalHandler,
SetCounterFunction,
@@ -19349,34 +19399,30 @@
v8::Isolate* isolate = v8::Isolate::New();
isolate->Enter();
switch (testCase_) {
- case IgnoreOOM:
- v8::V8::IgnoreOutOfMemoryException();
- break;
+ case SetResourceConstraints: {
+ static const int K = 1024;
+ v8::ResourceConstraints constraints;
+ constraints.set_max_young_space_size(256 * K);
+ constraints.set_max_old_space_size(4 * K * K);
+ v8::SetResourceConstraints(CcTest::isolate(), &constraints);
+ break;
+ }
- case SetResourceConstraints: {
- static const int K = 1024;
- v8::ResourceConstraints constraints;
- constraints.set_max_young_space_size(256 * K);
- constraints.set_max_old_space_size(4 * K * K);
- v8::SetResourceConstraints(CcTest::isolate(), &constraints);
- break;
- }
+ case SetFatalHandler:
+ v8::V8::SetFatalErrorHandler(NULL);
+ break;
- case SetFatalHandler:
- v8::V8::SetFatalErrorHandler(NULL);
- break;
+ case SetCounterFunction:
+ v8::V8::SetCounterFunction(NULL);
+ break;
- case SetCounterFunction:
- v8::V8::SetCounterFunction(NULL);
- break;
+ case SetCreateHistogramFunction:
+ v8::V8::SetCreateHistogramFunction(NULL);
+ break;
- case SetCreateHistogramFunction:
- v8::V8::SetCreateHistogramFunction(NULL);
- break;
-
- case SetAddHistogramSampleFunction:
- v8::V8::SetAddHistogramSampleFunction(NULL);
- break;
+ case SetAddHistogramSampleFunction:
+ v8::V8::SetAddHistogramSampleFunction(NULL);
+ break;
}
isolate->Exit();
isolate->Dispose();
@@ -19400,31 +19446,26 @@
TEST(InitializeDefaultIsolateOnSecondaryThread1) {
- InitializeTestHelper(InitDefaultIsolateThread::IgnoreOOM);
-}
-
-
-TEST(InitializeDefaultIsolateOnSecondaryThread2) {
InitializeTestHelper(InitDefaultIsolateThread::SetResourceConstraints);
}
-TEST(InitializeDefaultIsolateOnSecondaryThread3) {
+TEST(InitializeDefaultIsolateOnSecondaryThread2) {
InitializeTestHelper(InitDefaultIsolateThread::SetFatalHandler);
}
-TEST(InitializeDefaultIsolateOnSecondaryThread4) {
+TEST(InitializeDefaultIsolateOnSecondaryThread3) {
InitializeTestHelper(InitDefaultIsolateThread::SetCounterFunction);
}
-TEST(InitializeDefaultIsolateOnSecondaryThread5) {
+TEST(InitializeDefaultIsolateOnSecondaryThread4) {
InitializeTestHelper(InitDefaultIsolateThread::SetCreateHistogramFunction);
}
-TEST(InitializeDefaultIsolateOnSecondaryThread6) {
+TEST(InitializeDefaultIsolateOnSecondaryThread5) {
InitializeTestHelper(InitDefaultIsolateThread::SetAddHistogramSampleFunction);
}
@@ -21962,25 +22003,6 @@
info.GetReturnValue().Set(v8_str("returned"));
}
- // TODO(dcarney): move this to v8.h
- static void SetAccessorProperty(Local<Object> object,
- Local<String> name,
- Local<Function> getter,
- Local<Function> setter = Local<Function>()) {
- i::Isolate* isolate = CcTest::i_isolate();
- v8::AccessControl settings = v8::DEFAULT;
- v8::PropertyAttribute attribute = v8::None;
- i::Handle<i::Object> getter_i = v8::Utils::OpenHandle(*getter);
- i::Handle<i::Object> setter_i = v8::Utils::OpenHandle(*setter, true);
- if (setter_i.is_null()) setter_i = isolate->factory()->null_value();
- i::JSObject::DefineAccessor(v8::Utils::OpenHandle(*object),
- v8::Utils::OpenHandle(*name),
- getter_i,
- setter_i,
- static_cast<PropertyAttributes>(attribute),
- settings);
- }
-
public:
enum SignatureType {
kNoSignature,
@@ -22049,9 +22071,9 @@
global_holder = Local<Object>::Cast(global_holder->GetPrototype());
}
global_holder->Set(v8_str("g_f"), function);
- SetAccessorProperty(global_holder, v8_str("g_acc"), function, function);
+ global_holder->SetAccessorProperty(v8_str("g_acc"), function, function);
function_holder->Set(v8_str("f"), function);
- SetAccessorProperty(function_holder, v8_str("acc"), function, function);
+ function_holder->SetAccessorProperty(v8_str("acc"), function, function);
// Initialize expected values.
callee = function;
count = 0;
@@ -22169,8 +22191,6 @@
TEST(Promises) {
- i::FLAG_harmony_promises = true;
-
LocalContext context;
v8::Isolate* isolate = context->GetIsolate();
v8::HandleScope scope(isolate);
diff --git a/test/cctest/test-assembler-arm.cc b/test/cctest/test-assembler-arm.cc
index b21dc34..9c1c04f 100644
--- a/test/cctest/test-assembler-arm.cc
+++ b/test/cctest/test-assembler-arm.cc
@@ -1266,6 +1266,10 @@
uint32_t dstA1;
uint32_t dstA2;
uint32_t dstA3;
+ uint32_t dstA4;
+ uint32_t dstA5;
+ uint32_t dstA6;
+ uint32_t dstA7;
} T;
T t;
@@ -1291,7 +1295,14 @@
__ add(r4, r0, Operand(OFFSET_OF(T, dstA0)));
__ vst1(Neon8, NeonListOperand(d0, 2), NeonMemOperand(r4));
- __ ldm(ia_w, sp, r4.bit() | pc.bit());
+ // The same expansion, but with different source and destination registers.
+ __ add(r4, r0, Operand(OFFSET_OF(T, srcA0)));
+ __ vld1(Neon8, NeonListOperand(d1), NeonMemOperand(r4));
+ __ vmovl(NeonU8, q1, d1);
+ __ add(r4, r0, Operand(OFFSET_OF(T, dstA4)));
+ __ vst1(Neon8, NeonListOperand(d2, 2), NeonMemOperand(r4));
+
+ __ ldm(ia_w, sp, r4.bit() | pc.bit());
CodeDesc desc;
assm.GetCode(&desc);
@@ -1326,6 +1337,10 @@
t.dstA1 = 0;
t.dstA2 = 0;
t.dstA3 = 0;
+ t.dstA4 = 0;
+ t.dstA5 = 0;
+ t.dstA6 = 0;
+ t.dstA7 = 0;
Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
USE(dummy);
CHECK_EQ(0x01020304, t.dst0);
@@ -1340,6 +1355,10 @@
CHECK_EQ(0x00410042, t.dstA1);
CHECK_EQ(0x00830084, t.dstA2);
CHECK_EQ(0x00810082, t.dstA3);
+ CHECK_EQ(0x00430044, t.dstA4);
+ CHECK_EQ(0x00410042, t.dstA5);
+ CHECK_EQ(0x00830084, t.dstA6);
+ CHECK_EQ(0x00810082, t.dstA7);
}
}
diff --git a/test/cctest/test-assembler-arm64.cc b/test/cctest/test-assembler-arm64.cc
index edb050e..51c202f 100644
--- a/test/cctest/test-assembler-arm64.cc
+++ b/test/cctest/test-assembler-arm64.cc
@@ -5247,32 +5247,42 @@
ASSERT(IsQuietNaN(q2_proc));
ASSERT(IsQuietNaN(qa_proc));
+ // Negated NaNs as it would be done on ARMv8 hardware.
+ double s1_proc_neg = rawbits_to_double(0xfffd555511111111);
+ double sa_proc_neg = rawbits_to_double(0xfffd5555aaaaaaaa);
+ double q1_proc_neg = rawbits_to_double(0xfffaaaaa11111111);
+ double qa_proc_neg = rawbits_to_double(0xfffaaaaaaaaaaaaa);
+ ASSERT(IsQuietNaN(s1_proc_neg));
+ ASSERT(IsQuietNaN(sa_proc_neg));
+ ASSERT(IsQuietNaN(q1_proc_neg));
+ ASSERT(IsQuietNaN(qa_proc_neg));
+
// Quiet NaNs are propagated.
- FmaddFmsubHelper(q1, 0, 0, q1_proc, -q1_proc, -q1_proc, q1_proc);
+ FmaddFmsubHelper(q1, 0, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc);
FmaddFmsubHelper(0, q2, 0, q2_proc, q2_proc, q2_proc, q2_proc);
- FmaddFmsubHelper(0, 0, qa, qa_proc, qa_proc, -qa_proc, -qa_proc);
- FmaddFmsubHelper(q1, q2, 0, q1_proc, -q1_proc, -q1_proc, q1_proc);
- FmaddFmsubHelper(0, q2, qa, qa_proc, qa_proc, -qa_proc, -qa_proc);
- FmaddFmsubHelper(q1, 0, qa, qa_proc, qa_proc, -qa_proc, -qa_proc);
- FmaddFmsubHelper(q1, q2, qa, qa_proc, qa_proc, -qa_proc, -qa_proc);
+ FmaddFmsubHelper(0, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg);
+ FmaddFmsubHelper(q1, q2, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc);
+ FmaddFmsubHelper(0, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg);
+ FmaddFmsubHelper(q1, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg);
+ FmaddFmsubHelper(q1, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg);
// Signalling NaNs are propagated, and made quiet.
- FmaddFmsubHelper(s1, 0, 0, s1_proc, -s1_proc, -s1_proc, s1_proc);
+ FmaddFmsubHelper(s1, 0, 0, s1_proc, s1_proc_neg, s1_proc_neg, s1_proc);
FmaddFmsubHelper(0, s2, 0, s2_proc, s2_proc, s2_proc, s2_proc);
- FmaddFmsubHelper(0, 0, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, s2, 0, s1_proc, -s1_proc, -s1_proc, s1_proc);
- FmaddFmsubHelper(0, s2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, 0, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, s2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
+ FmaddFmsubHelper(0, 0, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, s2, 0, s1_proc, s1_proc_neg, s1_proc_neg, s1_proc);
+ FmaddFmsubHelper(0, s2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, 0, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, s2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
// Signalling NaNs take precedence over quiet NaNs.
- FmaddFmsubHelper(s1, q2, qa, s1_proc, -s1_proc, -s1_proc, s1_proc);
+ FmaddFmsubHelper(s1, q2, qa, s1_proc, s1_proc_neg, s1_proc_neg, s1_proc);
FmaddFmsubHelper(q1, s2, qa, s2_proc, s2_proc, s2_proc, s2_proc);
- FmaddFmsubHelper(q1, q2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, s2, qa, s1_proc, -s1_proc, -s1_proc, s1_proc);
- FmaddFmsubHelper(q1, s2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, q2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, s2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
+ FmaddFmsubHelper(q1, q2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, s2, qa, s1_proc, s1_proc_neg, s1_proc_neg, s1_proc);
+ FmaddFmsubHelper(q1, s2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, q2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, s2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
// A NaN generated by the intermediate op1 * op2 overrides a quiet NaN in a.
FmaddFmsubHelper(0, kFP64PositiveInfinity, qa,
@@ -5320,32 +5330,42 @@
ASSERT(IsQuietNaN(q2_proc));
ASSERT(IsQuietNaN(qa_proc));
+ // Negated NaNs as it would be done on ARMv8 hardware.
+ float s1_proc_neg = rawbits_to_float(0xffd51111);
+ float sa_proc_neg = rawbits_to_float(0xffd5aaaa);
+ float q1_proc_neg = rawbits_to_float(0xffea1111);
+ float qa_proc_neg = rawbits_to_float(0xffeaaaaa);
+ ASSERT(IsQuietNaN(s1_proc_neg));
+ ASSERT(IsQuietNaN(sa_proc_neg));
+ ASSERT(IsQuietNaN(q1_proc_neg));
+ ASSERT(IsQuietNaN(qa_proc_neg));
+
// Quiet NaNs are propagated.
- FmaddFmsubHelper(q1, 0, 0, q1_proc, -q1_proc, -q1_proc, q1_proc);
+ FmaddFmsubHelper(q1, 0, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc);
FmaddFmsubHelper(0, q2, 0, q2_proc, q2_proc, q2_proc, q2_proc);
- FmaddFmsubHelper(0, 0, qa, qa_proc, qa_proc, -qa_proc, -qa_proc);
- FmaddFmsubHelper(q1, q2, 0, q1_proc, -q1_proc, -q1_proc, q1_proc);
- FmaddFmsubHelper(0, q2, qa, qa_proc, qa_proc, -qa_proc, -qa_proc);
- FmaddFmsubHelper(q1, 0, qa, qa_proc, qa_proc, -qa_proc, -qa_proc);
- FmaddFmsubHelper(q1, q2, qa, qa_proc, qa_proc, -qa_proc, -qa_proc);
+ FmaddFmsubHelper(0, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg);
+ FmaddFmsubHelper(q1, q2, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc);
+ FmaddFmsubHelper(0, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg);
+ FmaddFmsubHelper(q1, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg);
+ FmaddFmsubHelper(q1, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg);
// Signalling NaNs are propagated, and made quiet.
- FmaddFmsubHelper(s1, 0, 0, s1_proc, -s1_proc, -s1_proc, s1_proc);
+ FmaddFmsubHelper(s1, 0, 0, s1_proc, s1_proc_neg, s1_proc_neg, s1_proc);
FmaddFmsubHelper(0, s2, 0, s2_proc, s2_proc, s2_proc, s2_proc);
- FmaddFmsubHelper(0, 0, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, s2, 0, s1_proc, -s1_proc, -s1_proc, s1_proc);
- FmaddFmsubHelper(0, s2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, 0, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, s2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
+ FmaddFmsubHelper(0, 0, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, s2, 0, s1_proc, s1_proc_neg, s1_proc_neg, s1_proc);
+ FmaddFmsubHelper(0, s2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, 0, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, s2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
// Signalling NaNs take precedence over quiet NaNs.
- FmaddFmsubHelper(s1, q2, qa, s1_proc, -s1_proc, -s1_proc, s1_proc);
+ FmaddFmsubHelper(s1, q2, qa, s1_proc, s1_proc_neg, s1_proc_neg, s1_proc);
FmaddFmsubHelper(q1, s2, qa, s2_proc, s2_proc, s2_proc, s2_proc);
- FmaddFmsubHelper(q1, q2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, s2, qa, s1_proc, -s1_proc, -s1_proc, s1_proc);
- FmaddFmsubHelper(q1, s2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, q2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
- FmaddFmsubHelper(s1, s2, sa, sa_proc, sa_proc, -sa_proc, -sa_proc);
+ FmaddFmsubHelper(q1, q2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, s2, qa, s1_proc, s1_proc_neg, s1_proc_neg, s1_proc);
+ FmaddFmsubHelper(q1, s2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, q2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
+ FmaddFmsubHelper(s1, s2, sa, sa_proc, sa_proc, sa_proc_neg, sa_proc_neg);
// A NaN generated by the intermediate op1 * op2 overrides a quiet NaN in a.
FmaddFmsubHelper(0, kFP32PositiveInfinity, qa,
@@ -10154,7 +10174,7 @@
static void ProcessNaNsHelper(double n, double m, double expected) {
- ASSERT(isnan(n) || isnan(m));
+ ASSERT(std::isnan(n) || std::isnan(m));
ASSERT(isnan(expected));
SETUP();
@@ -10226,7 +10246,7 @@
static void ProcessNaNsHelper(float n, float m, float expected) {
- ASSERT(isnan(n) || isnan(m));
+ ASSERT(std::isnan(n) || std::isnan(m));
ASSERT(isnan(expected));
SETUP();
@@ -10298,10 +10318,10 @@
static void DefaultNaNHelper(float n, float m, float a) {
- ASSERT(isnan(n) || isnan(m) || isnan(a));
+ ASSERT(std::isnan(n) || std::isnan(m) || isnan(a));
- bool test_1op = isnan(n);
- bool test_2op = isnan(n) || isnan(m);
+ bool test_1op = std::isnan(n);
+ bool test_2op = std::isnan(n) || std::isnan(m);
SETUP();
START();
@@ -10426,10 +10446,10 @@
static void DefaultNaNHelper(double n, double m, double a) {
- ASSERT(isnan(n) || isnan(m) || isnan(a));
+ ASSERT(std::isnan(n) || std::isnan(m) || isnan(a));
- bool test_1op = isnan(n);
- bool test_2op = isnan(n) || isnan(m);
+ bool test_1op = std::isnan(n);
+ bool test_2op = std::isnan(n) || std::isnan(m);
SETUP();
START();
diff --git a/test/cctest/test-disasm-arm.cc b/test/cctest/test-disasm-arm.cc
index 56f093e..5eff420 100644
--- a/test/cctest/test-disasm-arm.cc
+++ b/test/cctest/test-disasm-arm.cc
@@ -410,6 +410,8 @@
"e6843895 pkhbt r3, r4, r5, lsl #17");
COMPARE(pkhtb(r3, r4, Operand(r5, ASR, 17)),
"e68438d5 pkhtb r3, r4, r5, asr #17");
+ COMPARE(uxtb(r9, Operand(r10, ROR, 0)),
+ "e6ef907a uxtb r9, r10");
COMPARE(uxtb(r3, Operand(r4, ROR, 8)),
"e6ef3474 uxtb r3, r4, ror #8");
COMPARE(uxtab(r3, r4, Operand(r5, ROR, 8)),
@@ -687,8 +689,10 @@
"f421420f vld1.8 {d4, d5, d6, d7}, [r1]");
COMPARE(vst1(Neon16, NeonListOperand(d17, 4), NeonMemOperand(r9)),
"f449124f vst1.16 {d17, d18, d19, d20}, [r9]");
+ COMPARE(vmovl(NeonU8, q3, d1),
+ "f3886a11 vmovl.u8 q3, d1");
COMPARE(vmovl(NeonU8, q4, d2),
- "f3884a12 vmovl.u8 q4, d2");
+ "f3888a12 vmovl.u8 q4, d2");
}
VERIFY_RUN();
diff --git a/test/cctest/test-microtask-delivery.cc b/test/cctest/test-microtask-delivery.cc
index 108337a..0172726 100644
--- a/test/cctest/test-microtask-delivery.cc
+++ b/test/cctest/test-microtask-delivery.cc
@@ -36,7 +36,6 @@
class HarmonyIsolate {
public:
HarmonyIsolate() {
- i::FLAG_harmony_promises = true;
isolate_ = Isolate::New();
isolate_->Enter();
}
diff --git a/test/cctest/test-parsing.cc b/test/cctest/test-parsing.cc
index 1f59f35..2746388 100644
--- a/test/cctest/test-parsing.cc
+++ b/test/cctest/test-parsing.cc
@@ -2529,8 +2529,8 @@
TEST(InvalidLeftHandSide) {
const char* assignment_context_data[][2] = {
- // {"", " = 1;"},
- // {"\"use strict\"; ", " = 1;"},
+ {"", " = 1;"},
+ {"\"use strict\"; ", " = 1;"},
{ NULL, NULL }
};
diff --git a/test/cctest/test-strings.cc b/test/cctest/test-strings.cc
index 8e63cb0..0f37c3e 100644
--- a/test/cctest/test-strings.cc
+++ b/test/cctest/test-strings.cc
@@ -1275,23 +1275,6 @@
}
-TEST(RegExpOverflow) {
- // Result string has the length 2^32, causing a 32-bit integer overflow.
- CcTest::InitializeVM();
- v8::HandleScope scope(CcTest::isolate());
- LocalContext context;
- v8::V8::IgnoreOutOfMemoryException();
- v8::Local<v8::Value> result = CompileRun(
- "var a = 'a'; "
- "for (var i = 0; i < 16; i++) { "
- " a += a; "
- "} "
- "a.replace(/a/g, a); ");
- CHECK(result.IsEmpty());
- CHECK(context->HasOutOfMemoryException());
-}
-
-
TEST(StringReplaceAtomTwoByteResult) {
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
diff --git a/test/mjsunit/debug-script.js b/test/mjsunit/debug-script.js
index 1a7283c..80d423e 100644
--- a/test/mjsunit/debug-script.js
+++ b/test/mjsunit/debug-script.js
@@ -59,7 +59,7 @@
}
// This has to be updated if the number of native scripts change.
-assertTrue(named_native_count == 17 || named_native_count == 18);
+assertTrue(named_native_count == 19 || named_native_count == 20);
// Only the 'gc' extension is loaded.
assertEquals(1, extension_count);
// This script and mjsunit.js has been loaded. If using d8, d8 loads
diff --git a/test/mjsunit/harmony/microtask-delivery.js b/test/mjsunit/es6/microtask-delivery.js
similarity index 97%
rename from test/mjsunit/harmony/microtask-delivery.js
rename to test/mjsunit/es6/microtask-delivery.js
index 566a39d..f74385e 100644
--- a/test/mjsunit/harmony/microtask-delivery.js
+++ b/test/mjsunit/es6/microtask-delivery.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-observation --harmony-promises --allow-natives-syntax
+// Flags: --allow-natives-syntax
var ordering = [];
function reset() {
diff --git a/test/mjsunit/es6/promises.js b/test/mjsunit/es6/promises.js
index 48b96f6..0470f39 100644
--- a/test/mjsunit/es6/promises.js
+++ b/test/mjsunit/es6/promises.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-promises --allow-natives-syntax
+// Flags: --allow-natives-syntax
var asyncAssertsExpected = 0;
diff --git a/test/mjsunit/es6/regress/regress-2034.js b/test/mjsunit/es6/regress/regress-2034.js
index 1b7dac0..5c738bf 100644
--- a/test/mjsunit/es6/regress/regress-2034.js
+++ b/test/mjsunit/es6/regress/regress-2034.js
@@ -25,8 +25,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-weak-collections
-
var key = {};
var map = new WeakMap;
Object.preventExtensions(key);
diff --git a/test/mjsunit/es6/regress/regress-2156.js b/test/mjsunit/es6/regress/regress-2156.js
index a34e381..fba2a29 100644
--- a/test/mjsunit/es6/regress/regress-2156.js
+++ b/test/mjsunit/es6/regress/regress-2156.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax --harmony-weak-collections
+// Flags: --allow-natives-syntax
var key1 = {};
var key2 = {};
diff --git a/test/mjsunit/es6/regress/regress-2829.js b/test/mjsunit/es6/regress/regress-2829.js
index 8d8f3f8..b48039c 100644
--- a/test/mjsunit/es6/regress/regress-2829.js
+++ b/test/mjsunit/es6/regress/regress-2829.js
@@ -25,8 +25,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-weak-collections
-
(function test1() {
var wm1 = new WeakMap();
wm1.set(Object.prototype, 23);
diff --git a/test/mjsunit/es6/weak_collections.js b/test/mjsunit/es6/weak_collections.js
index af23903..74235e7 100644
--- a/test/mjsunit/es6/weak_collections.js
+++ b/test/mjsunit/es6/weak_collections.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-weak-collections --expose-gc --allow-natives-syntax
+// Flags: --expose-gc --allow-natives-syntax
// Note: this test is superseded by harmony/collections.js.
@@ -225,7 +225,7 @@
assertTrue(C.prototype instanceof Object);
assertEquals({
value: {},
- writable: true, // TODO(2793): This should be non-writable.
+ writable: false,
enumerable: false,
configurable: false
}, Object.getOwnPropertyDescriptor(C, "prototype"));
diff --git a/test/mjsunit/es7/object-observe.js b/test/mjsunit/es7/object-observe.js
index 0aa3601..f5e84a6 100644
--- a/test/mjsunit/es7/object-observe.js
+++ b/test/mjsunit/es7/object-observe.js
@@ -25,8 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-observation --harmony-proxies
-// Flags: --harmony-collections --harmony-weak-collections
+// Flags: --harmony-proxies --harmony-collections
// Flags: --harmony-symbols --allow-natives-syntax
var allObservers = [];
diff --git a/test/mjsunit/fuzz-natives-part1.js b/test/mjsunit/fuzz-natives-part1.js
index e22ac49..1bbe49e 100644
--- a/test/mjsunit/fuzz-natives-part1.js
+++ b/test/mjsunit/fuzz-natives-part1.js
@@ -206,17 +206,14 @@
"_TwoByteSeqStringSetChar": true,
// Only applicable to TypedArrays.
- "TypedArrayInitialize": true,
+ "_TypedArrayInitialize": true,
// Only applicable to generators.
"_GeneratorNext": true,
"_GeneratorThrow": true,
// Only applicable to DataViews.
- "DataViewInitialize": true,
- "DataViewGetBuffer": true,
- "DataViewGetByteLength": true,
- "DataViewGetByteOffset": true
+ "_DataViewInitialize": true,
};
var currentlyUncallable = {
diff --git a/test/mjsunit/fuzz-natives-part2.js b/test/mjsunit/fuzz-natives-part2.js
index 293ad7e..394c2d7 100644
--- a/test/mjsunit/fuzz-natives-part2.js
+++ b/test/mjsunit/fuzz-natives-part2.js
@@ -207,17 +207,14 @@
"_TwoByteSeqStringSetChar": true,
// Only applicable to TypedArrays.
- "TypedArrayInitialize": true,
+ "_TypedArrayInitialize": true,
// Only applicable to generators.
"_GeneratorNext": true,
"_GeneratorThrow": true,
// Only applicable to DataViews.
- "DataViewInitialize": true,
- "DataViewGetBuffer": true,
- "DataViewGetByteLength": true,
- "DataViewGetByteOffset": true
+ "_DataViewInitialize": true,
};
var currentlyUncallable = {
diff --git a/test/mjsunit/fuzz-natives-part3.js b/test/mjsunit/fuzz-natives-part3.js
index 1f33cef..28c24b0 100644
--- a/test/mjsunit/fuzz-natives-part3.js
+++ b/test/mjsunit/fuzz-natives-part3.js
@@ -206,17 +206,14 @@
"_TwoByteSeqStringSetChar": true,
// Only applicable to TypedArrays.
- "TypedArrayInitialize": true,
+ "_TypedArrayInitialize": true,
// Only applicable to generators.
"_GeneratorNext": true,
"_GeneratorThrow": true,
// Only applicable to DataViews.
- "DataViewInitialize":true,
- "DataViewGetBuffer": true,
- "DataViewGetByteLength": true,
- "DataViewGetByteOffset": true,
+ "_DataViewInitialize":true,
};
var currentlyUncallable = {
diff --git a/test/mjsunit/fuzz-natives-part4.js b/test/mjsunit/fuzz-natives-part4.js
index 5f1f912..2a336ca 100644
--- a/test/mjsunit/fuzz-natives-part4.js
+++ b/test/mjsunit/fuzz-natives-part4.js
@@ -206,17 +206,14 @@
"_TwoByteSeqStringSetChar": true,
// Only applicable to TypedArrays.
- "TypedArrayInitialize": true,
+ "_TypedArrayInitialize": true,
// Only applicable to generators.
"_GeneratorNext": true,
"_GeneratorThrow": true,
// Only applicable to DataViews.
- "DataViewInitialize": true,
- "DataViewGetBuffer": true,
- "DataViewGetByteLength": true,
- "DataViewGetByteOffset": true
+ "_DataViewInitialize": true,
};
var currentlyUncallable = {
diff --git a/test/mjsunit/harmony/collections.js b/test/mjsunit/harmony/collections.js
index b33d080..804a320 100644
--- a/test/mjsunit/harmony/collections.js
+++ b/test/mjsunit/harmony/collections.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-collections --harmony-weak-collections
+// Flags: --harmony-collections
// Flags: --expose-gc --allow-natives-syntax
@@ -290,19 +290,20 @@
// Test prototype property of Set, Map, WeakMap and WeakSet.
-function TestPrototype(C) {
+// TODO(2793): Should all be non-writable, and the extra flag removed.
+function TestPrototype(C, writable) {
assertTrue(C.prototype instanceof Object);
assertEquals({
value: {},
- writable: true, // TODO(2793): This should be non-writable.
+ writable: writable,
enumerable: false,
configurable: false
}, Object.getOwnPropertyDescriptor(C, "prototype"));
}
-TestPrototype(Set);
-TestPrototype(Map);
-TestPrototype(WeakMap);
-TestPrototype(WeakSet);
+TestPrototype(Set, true);
+TestPrototype(Map, true);
+TestPrototype(WeakMap, false);
+TestPrototype(WeakSet, false);
// Test constructor property of the Set, Map, WeakMap and WeakSet prototype.
diff --git a/test/mjsunit/harmony/private.js b/test/mjsunit/harmony/private.js
index a14afe0..2257998 100644
--- a/test/mjsunit/harmony/private.js
+++ b/test/mjsunit/harmony/private.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-symbols --harmony-collections --harmony-weak-collections
+// Flags: --harmony-symbols --harmony-collections
// Flags: --expose-gc --allow-natives-syntax
var symbols = []
diff --git a/test/mjsunit/harmony/proxies-hash.js b/test/mjsunit/harmony/proxies-hash.js
index 6f0b19f..789de35 100644
--- a/test/mjsunit/harmony/proxies-hash.js
+++ b/test/mjsunit/harmony/proxies-hash.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-proxies --harmony-collections --harmony-weak-collections
+// Flags: --harmony-proxies --harmony-collections
// Helper.
diff --git a/test/mjsunit/harmony/regress/regress-observe-empty-double-array.js b/test/mjsunit/harmony/regress/regress-observe-empty-double-array.js
index 4b65169..301ece7 100644
--- a/test/mjsunit/harmony/regress/regress-observe-empty-double-array.js
+++ b/test/mjsunit/harmony/regress/regress-observe-empty-double-array.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-observation --allow-natives-syntax
+// Flags: --allow-natives-syntax
//
// Test passes if it does not crash.
diff --git a/test/mjsunit/harmony/symbols.js b/test/mjsunit/harmony/symbols.js
index d19aece..2204392 100644
--- a/test/mjsunit/harmony/symbols.js
+++ b/test/mjsunit/harmony/symbols.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-symbols --harmony-collections --harmony-weak-collections
+// Flags: --harmony-symbols --harmony-collections
// Flags: --expose-gc --allow-natives-syntax
var symbols = []
@@ -410,12 +410,29 @@
TestGetOwnPropertySymbolsWithProto()
-function TestRegistry() {
- assertFalse(Symbol.for("@@create") === Symbol.create)
- assertFalse(Symbol.for("@@iterator") === Symbol.iterator)
- assertTrue(Symbol.keyFor(Symbol.create) === undefined)
- assertTrue(Symbol.keyFor(Symbol.iterator) === undefined)
+function TestWellKnown() {
+ var symbols = [
+ "create", "hasInstance", "isConcatSpreadable", "isRegExp",
+ "iterator", "toStringTag", "unscopables"
+ ]
+ for (var i in symbols) {
+ var name = symbols[i]
+ var desc = Object.getOwnPropertyDescriptor(Symbol, name)
+ assertSame("symbol", typeof desc.value)
+ assertSame("Symbol(Symbol." + name + ")", desc.value.toString())
+ assertFalse(desc.writable)
+ assertFalse(desc.configurable)
+ assertFalse(desc.enumerable)
+
+ assertFalse(Symbol.for("Symbol." + name) === desc.value)
+ assertTrue(Symbol.keyFor(desc.value) === undefined)
+ }
+}
+TestWellKnown()
+
+
+function TestRegistry() {
var symbol1 = Symbol.for("x1")
var symbol2 = Symbol.for("x2")
assertFalse(symbol1 === symbol2)
diff --git a/test/mjsunit/mjsunit.status b/test/mjsunit/mjsunit.status
index 9497094..05f9dd3 100644
--- a/test/mjsunit/mjsunit.status
+++ b/test/mjsunit/mjsunit.status
@@ -44,7 +44,6 @@
# When that bug is fixed, revert the expectation to:
# Skip long running test in debug and allow it to timeout in release mode.
# regress/regress-524: [PASS, TIMEOUT, ['mode == debug', SKIP]],
- 'string-oom-slow-*': [PASS, SLOW, ['mode == debug', SKIP]],
# This test non-deterministically runs out of memory on Windows ia32.
'regress/regress-crbug-160010': [SKIP],
diff --git a/test/mjsunit/regress-3225.js b/test/mjsunit/regress-3225.js
new file mode 100644
index 0000000..357f94b
--- /dev/null
+++ b/test/mjsunit/regress-3225.js
@@ -0,0 +1,48 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --expose-debug-as debug --harmony-generators
+
+Debug = debug.Debug
+
+var debug_step = 0;
+var failure = null;
+
+function listener(event, exec_state, event_data, data) {
+ if (event != Debug.DebugEvent.Break) return;
+ try {
+ if (debug_step == 0) {
+ assertEquals(1, exec_state.frame(0).evaluate('a').value());
+ assertEquals(3, exec_state.frame(0).evaluate('b').value());
+ exec_state.frame(0).evaluate("a = 4").value();
+ debug_step++;
+ } else {
+ assertEquals(4, exec_state.frame(0).evaluate('a').value());
+ assertEquals(3, exec_state.frame(0).evaluate('b').value());
+ exec_state.frame(0).evaluate("b = 5").value();
+ }
+ } catch (e) {
+ failure = e;
+ }
+}
+
+Debug.setListener(listener);
+
+function* generator(a, b) {
+ var b = 3; // Shadows a parameter.
+ debugger;
+ yield a;
+ yield b;
+ debugger;
+ return b;
+}
+
+var foo = generator(1, 2);
+
+assertEquals(4, foo.next().value);
+assertEquals(3, foo.next().value);
+assertEquals(5, foo.next().value);
+assertNull(failure);
+
+Debug.setListener(null);
diff --git a/test/mjsunit/regress/regress-355485.js b/test/mjsunit/regress/regress-355485.js
new file mode 100644
index 0000000..3c66884
--- /dev/null
+++ b/test/mjsunit/regress/regress-355485.js
@@ -0,0 +1,5 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+assertEquals("\u039c", "\u00b5".toUpperCase());
diff --git a/test/mjsunit/regress/regress-crbug-350864.js b/test/mjsunit/regress/regress-crbug-350864.js
index 15b3242..8a793cb 100644
--- a/test/mjsunit/regress/regress-crbug-350864.js
+++ b/test/mjsunit/regress/regress-crbug-350864.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony_symbols --harmony-weak-collections
+// Flags: --harmony-symbols
var v0 = new WeakMap;
var v1 = {};
diff --git a/test/mjsunit/regress/regress-store-heapobject.js b/test/mjsunit/regress/regress-store-heapobject.js
new file mode 100644
index 0000000..9f2a1b8
--- /dev/null
+++ b/test/mjsunit/regress/regress-store-heapobject.js
@@ -0,0 +1,27 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax
+
+var o = {a: undefined};
+
+function store(o, v) {
+ o.a = v;
+}
+
+store(o, undefined);
+store(o, undefined);
+
+function f(bool) {
+ var o = {a: undefined};
+ if (bool) {
+ store(o, 1);
+ }
+ return o;
+}
+
+f(false);
+f(false);
+%OptimizeFunctionOnNextCall(f);
+f(true);
diff --git a/test/mjsunit/string-oom-slow-escape.js b/test/mjsunit/string-oom-slow-escape.js
deleted file mode 100644
index 34088c0..0000000
--- a/test/mjsunit/string-oom-slow-escape.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-
-var a = '<';
-for (var i = 0; i < 26; i++) a += a;
-a = a + a + a;
-
-function escape_a() {
- escape(a);
-}
-
-assertThrows(escape_a, RangeError);
diff --git a/test/mjsunit/string-oom-slow-json-stringify.js b/test/mjsunit/string-oom-slow-json-stringify.js
deleted file mode 100644
index 2f99655..0000000
--- a/test/mjsunit/string-oom-slow-json-stringify.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-var a = 'a';
-for (var i = 0; i < 20; i++) a += a;
-for (var i = 0; i < 8; i++) a = [a, a];
-
-function stringify() {
- JSON.stringify(a);
-}
-
-assertThrows(stringify, RangeError);
diff --git a/test/mjsunit/string-oom-slow-replace-one-with-string.js b/test/mjsunit/string-oom-slow-replace-one-with-string.js
deleted file mode 100644
index bfcbe0f..0000000
--- a/test/mjsunit/string-oom-slow-replace-one-with-string.js
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-var a = 'a';
-for (var i = 0; i < 27; i++) a += a;
-
-function replace() {
- a.replace('a', a);
-}
-
-assertThrows(replace, RangeError);
diff --git a/test/mjsunit/string-oom-slow-to-uppercase.js b/test/mjsunit/string-oom-slow-to-uppercase.js
deleted file mode 100644
index 79f975b..0000000
--- a/test/mjsunit/string-oom-slow-to-uppercase.js
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-var s = "\u00df"; // ß
-for (var i = 0; i < 27; i++) s += s;
-
-function upper() {
- s.toUpperCase();
-}
-
-assertThrows(upper, RangeError);
diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp
index 6f97def..e17ef35 100644
--- a/tools/gyp/v8.gyp
+++ b/tools/gyp/v8.gyp
@@ -1059,6 +1059,8 @@
'../../src/regexp.js',
'../../src/arraybuffer.js',
'../../src/typedarray.js',
+ '../../src/weak_collection.js',
+ '../../src/promise.js',
'../../src/object-observe.js',
'../../src/macros.py',
],
@@ -1067,8 +1069,6 @@
'../../src/symbol.js',
'../../src/proxy.js',
'../../src/collection.js',
- '../../src/weak_collection.js',
- '../../src/promise.js',
'../../src/generator.js',
'../../src/array-iterator.js',
'../../src/harmony-string.js',
diff --git a/tools/lexer-shell.cc b/tools/lexer-shell.cc
index 0610e7f..8c7debc 100644
--- a/tools/lexer-shell.cc
+++ b/tools/lexer-shell.cc
@@ -26,7 +26,6 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <assert.h>
-#include <fcntl.h>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
@@ -35,53 +34,18 @@
#include "v8.h"
#include "api.h"
-#include "ast.h"
-#include "char-predicates-inl.h"
#include "messages.h"
#include "platform.h"
#include "runtime.h"
#include "scanner-character-streams.h"
#include "scopeinfo.h"
+#include "shell-utils.h"
#include "string-stream.h"
#include "scanner.h"
using namespace v8::internal;
-enum Encoding {
- LATIN1,
- UTF8,
- UTF16
-};
-
-
-const byte* ReadFile(const char* name, Isolate* isolate,
- int* size, int repeat) {
- FILE* file = fopen(name, "rb");
- *size = 0;
- if (file == NULL) return NULL;
-
- fseek(file, 0, SEEK_END);
- int file_size = ftell(file);
- rewind(file);
-
- *size = file_size * repeat;
-
- byte* chars = new byte[*size + 1];
- for (int i = 0; i < file_size;) {
- int read = static_cast<int>(fread(&chars[i], 1, file_size - i, file));
- i += read;
- }
- fclose(file);
-
- for (int i = file_size; i < *size; i++) {
- chars[i] = chars[i - file_size];
- }
- chars[*size] = 0;
-
- return chars;
-}
-
class BaselineScanner {
public:
@@ -92,7 +56,7 @@
int repeat)
: stream_(NULL) {
int length = 0;
- source_ = ReadFile(fname, isolate, &length, repeat);
+ source_ = ReadFileAndRepeat(fname, &length, repeat);
unicode_cache_ = new UnicodeCache();
scanner_ = new Scanner(unicode_cache_);
switch (encoding) {
diff --git a/tools/lexer-shell.gyp b/tools/lexer-shell.gyp
index 8e6ab7a..623a503 100644
--- a/tools/lexer-shell.gyp
+++ b/tools/lexer-shell.gyp
@@ -51,6 +51,29 @@
],
'sources': [
'lexer-shell.cc',
+ 'shell-utils.h',
+ ],
+ },
+ {
+ 'target_name': 'parser-shell',
+ 'type': 'executable',
+ 'dependencies': [
+ '../tools/gyp/v8.gyp:v8',
+ ],
+ 'conditions': [
+ ['v8_enable_i18n_support==1', {
+ 'dependencies': [
+ '<(icu_gyp_path):icui18n',
+ '<(icu_gyp_path):icuuc',
+ ],
+ }],
+ ],
+ 'include_dirs+': [
+ '../src',
+ ],
+ 'sources': [
+ 'parser-shell.cc',
+ 'shell-utils.h',
],
},
],
diff --git a/tools/parser-shell.cc b/tools/parser-shell.cc
new file mode 100644
index 0000000..4da15fc
--- /dev/null
+++ b/tools/parser-shell.cc
@@ -0,0 +1,171 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include <assert.h>
+#include <string.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string>
+#include <vector>
+#include "v8.h"
+
+#include "api.h"
+#include "compiler.h"
+#include "scanner-character-streams.h"
+#include "shell-utils.h"
+#include "parser.h"
+#include "preparse-data-format.h"
+#include "preparse-data.h"
+#include "preparser.h"
+
+using namespace v8::internal;
+
+enum TestMode {
+ PreParseAndParse,
+ PreParse,
+ Parse
+};
+
+std::pair<TimeDelta, TimeDelta> RunBaselineParser(
+ const char* fname, Encoding encoding, int repeat, v8::Isolate* isolate,
+ v8::Handle<v8::Context> context, TestMode test_mode) {
+ int length = 0;
+ const byte* source = ReadFileAndRepeat(fname, &length, repeat);
+ v8::Handle<v8::String> source_handle;
+ switch (encoding) {
+ case UTF8: {
+ source_handle = v8::String::NewFromUtf8(
+ isolate, reinterpret_cast<const char*>(source));
+ break;
+ }
+ case UTF16: {
+ source_handle = v8::String::NewFromTwoByte(
+ isolate, reinterpret_cast<const uint16_t*>(source),
+ v8::String::kNormalString, length / 2);
+ break;
+ }
+ case LATIN1: {
+ source_handle = v8::String::NewFromOneByte(isolate, source);
+ break;
+ }
+ }
+ v8::ScriptData* cached_data = NULL;
+ TimeDelta preparse_time, parse_time;
+ if (test_mode == PreParseAndParse || test_mode == PreParse) {
+ ElapsedTimer timer;
+ timer.Start();
+ cached_data = v8::ScriptData::PreCompile(source_handle);
+ preparse_time = timer.Elapsed();
+ if (cached_data == NULL || cached_data->HasError()) {
+ fprintf(stderr, "Preparsing failed\n");
+ return std::make_pair(TimeDelta(), TimeDelta());
+ }
+ }
+ if (test_mode == PreParseAndParse || test_mode == Parse) {
+ Handle<String> str = v8::Utils::OpenHandle(*source_handle);
+ i::Isolate* internal_isolate = str->GetIsolate();
+ Handle<Script> script = internal_isolate->factory()->NewScript(str);
+ CompilationInfoWithZone info(script);
+ info.MarkAsGlobal();
+ i::ScriptDataImpl* cached_data_impl =
+ static_cast<i::ScriptDataImpl*>(cached_data);
+ if (test_mode == PreParseAndParse) {
+ info.SetCachedData(&cached_data_impl,
+ i::CONSUME_CACHED_DATA);
+ }
+ info.SetContext(v8::Utils::OpenHandle(*context));
+ ElapsedTimer timer;
+ timer.Start();
+ // Allow lazy parsing; otherwise the preparse data won't help.
+ bool success = Parser::Parse(&info, true);
+ parse_time = timer.Elapsed();
+ if (!success) {
+ fprintf(stderr, "Parsing failed\n");
+ return std::make_pair(TimeDelta(), TimeDelta());
+ }
+ }
+ return std::make_pair(preparse_time, parse_time);
+}
+
+
+int main(int argc, char* argv[]) {
+ v8::V8::InitializeICU();
+ v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
+ Encoding encoding = LATIN1;
+ TestMode test_mode = PreParseAndParse;
+ std::vector<std::string> fnames;
+ std::string benchmark;
+ int repeat = 1;
+ for (int i = 0; i < argc; ++i) {
+ if (strcmp(argv[i], "--latin1") == 0) {
+ encoding = LATIN1;
+ } else if (strcmp(argv[i], "--utf8") == 0) {
+ encoding = UTF8;
+ } else if (strcmp(argv[i], "--utf16") == 0) {
+ encoding = UTF16;
+ } else if (strcmp(argv[i], "--preparse-and-parse") == 0) {
+ test_mode = PreParseAndParse;
+ } else if (strcmp(argv[i], "--preparse") == 0) {
+ test_mode = PreParse;
+ } else if (strcmp(argv[i], "--parse") == 0) {
+ test_mode = Parse;
+ } else if (strncmp(argv[i], "--benchmark=", 12) == 0) {
+ benchmark = std::string(argv[i]).substr(12);
+ } else if (strncmp(argv[i], "--repeat=", 9) == 0) {
+ std::string repeat_str = std::string(argv[i]).substr(9);
+ repeat = atoi(repeat_str.c_str());
+ } else if (i > 0 && argv[i][0] != '-') {
+ fnames.push_back(std::string(argv[i]));
+ }
+ }
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+ {
+ v8::HandleScope handle_scope(isolate);
+ v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
+ v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
+ ASSERT(!context.IsEmpty());
+ {
+ v8::Context::Scope scope(context);
+ double preparse_total = 0;
+ double parse_total = 0;
+ for (size_t i = 0; i < fnames.size(); i++) {
+ std::pair<TimeDelta, TimeDelta> time = RunBaselineParser(
+ fnames[i].c_str(), encoding, repeat, isolate, context, test_mode);
+ preparse_total += time.first.InMillisecondsF();
+ parse_total += time.second.InMillisecondsF();
+ }
+ if (benchmark.empty()) benchmark = "Baseline";
+ printf("%s(PreParseRunTime): %.f ms\n", benchmark.c_str(),
+ preparse_total);
+ printf("%s(ParseRunTime): %.f ms\n", benchmark.c_str(), parse_total);
+ printf("%s(RunTime): %.f ms\n", benchmark.c_str(),
+ preparse_total + parse_total);
+ }
+ }
+ v8::V8::Dispose();
+ return 0;
+}
diff --git a/tools/push-to-trunk/auto_roll.py b/tools/push-to-trunk/auto_push.py
similarity index 65%
rename from tools/push-to-trunk/auto_roll.py
rename to tools/push-to-trunk/auto_push.py
index ebe2397..9a43c3f 100755
--- a/tools/push-to-trunk/auto_roll.py
+++ b/tools/push-to-trunk/auto_push.py
@@ -44,6 +44,8 @@
SETTINGS_LOCATION: "~/.auto-roll",
}
+PUSH_MESSAGE_RE = re.compile(r".* \(based on bleeding_edge revision r(\d+)\)$")
+
class Preparation(Step):
MESSAGE = "Preparation."
@@ -53,7 +55,7 @@
self.CommonPrepare()
-class CheckAutoRollSettings(Step):
+class CheckAutoPushSettings(Step):
MESSAGE = "Checking settings file."
def RunStep(self):
@@ -77,31 +79,6 @@
% self["tree_message"])
-class FetchLatestRevision(Step):
- MESSAGE = "Fetching latest V8 revision."
-
- def RunStep(self):
- match = re.match(r"^r(\d+) ", self.GitSVNLog())
- if not match: # pragma: no cover
- self.Die("Could not extract current svn revision from log.")
- self["latest"] = match.group(1)
-
-
-class CheckLastPush(Step):
- MESSAGE = "Checking last V8 push to trunk."
-
- def RunStep(self):
- last_push_hash = self.FindLastTrunkPush()
- last_push = int(self.GitSVNFindSVNRev(last_push_hash))
-
- # TODO(machenbach): This metric counts all revisions. It could be
- # improved by counting only the revisions on bleeding_edge.
- if int(self["latest"]) - last_push < 10: # pragma: no cover
- # This makes sure the script doesn't push twice in a row when the cron
- # job retries several times.
- self.Die("Last push too recently: %d" % last_push)
-
-
class FetchLKGR(Step):
MESSAGE = "Fetching V8 LKGR."
@@ -111,55 +88,51 @@
self["lkgr"] = self.ReadURL(lkgr_url, wait_plan=[5, 20, 300, 300])
-class PushToTrunk(Step):
- MESSAGE = "Pushing to trunk if possible."
-
- def PushTreeStatus(self, message):
- if not self._options.status_password:
- print "Skipping tree status update without password file."
- return
- params = {
- "message": message,
- "username": "v8-auto-roll@chromium.org",
- "password": FileToText(self._options.status_password).strip(),
- }
- params = urllib.urlencode(params)
- print "Pushing tree status: '%s'" % message
- self.ReadURL("https://v8-status.appspot.com/status", params,
- wait_plan=[5, 20])
+class CheckLastPush(Step):
+ MESSAGE = "Checking last V8 push to trunk."
def RunStep(self):
- latest = int(self["latest"])
- lkgr = int(self["lkgr"])
- if latest == lkgr:
- print "ToT (r%d) is clean. Pushing to trunk." % latest
- self.PushTreeStatus("Tree is closed (preparing to push)")
+ last_push = self.FindLastTrunkPush()
- # TODO(machenbach): Update the script before calling it.
- try:
- if self._options.push:
- P = push_to_trunk.PushToTrunk
- self._side_effect_handler.Call(
- P(push_to_trunk.CONFIG, self._side_effect_handler).Run,
- ["-a", self._options.author,
- "-r", self._options.reviewer,
- "-f"])
- finally:
- self.PushTreeStatus(self["tree_message"])
- else:
- print("ToT (r%d) is ahead of the LKGR (r%d). Skipping push to trunk."
- % (latest, lkgr))
+ # Retrieve the bleeding edge revision of the last push from the text in
+ # the push commit message.
+ last_push_title = self.GitLog(n=1, format="%s", git_hash=last_push)
+ last_push_be = PUSH_MESSAGE_RE.match(last_push_title).group(1)
+
+ if not last_push_be: # pragma: no cover
+ self.Die("Could not retrieve bleeding edge revision for trunk push %s"
+ % last_push)
+
+ # TODO(machenbach): This metric counts all revisions. It could be
+ # improved by counting only the revisions on bleeding_edge.
+ if int(self["lkgr"]) - int(last_push_be) < 10: # pragma: no cover
+ # This makes sure the script doesn't push twice in a row when the cron
+ # job retries several times.
+ self.Die("Last push too recently: %s" % last_push_be)
-class AutoRoll(ScriptsBase):
+class PushToTrunk(Step):
+ MESSAGE = "Pushing to trunk if specified."
+
+ def RunStep(self):
+ print "Pushing lkgr %s to trunk." % self["lkgr"]
+
+ # TODO(machenbach): Update the script before calling it.
+ if self._options.push:
+ P = push_to_trunk.PushToTrunk
+ self._side_effect_handler.Call(
+ P(push_to_trunk.CONFIG, self._side_effect_handler).Run,
+ ["--author", self._options.author,
+ "--reviewer", self._options.reviewer,
+ "--revision", self["lkgr"],
+ "--force"])
+
+
+class AutoPush(ScriptsBase):
def _PrepareOptions(self, parser):
- parser.add_argument("-c", "--chromium",
- help=("Deprecated."))
parser.add_argument("-p", "--push",
help="Push to trunk. Dry run if unspecified.",
default=False, action="store_true")
- parser.add_argument("--status-password",
- help="A file with the password to the status app.")
def _ProcessOptions(self, options):
if not options.author or not options.reviewer: # pragma: no cover
@@ -171,14 +144,13 @@
def _Steps(self):
return [
Preparation,
- CheckAutoRollSettings,
+ CheckAutoPushSettings,
CheckTreeStatus,
- FetchLatestRevision,
- CheckLastPush,
FetchLKGR,
+ CheckLastPush,
PushToTrunk,
]
if __name__ == "__main__": # pragma: no cover
- sys.exit(AutoRoll(CONFIG).Run())
+ sys.exit(AutoPush(CONFIG).Run())
diff --git a/tools/push-to-trunk/test_scripts.py b/tools/push-to-trunk/test_scripts.py
index 957ea63..9107db9 100644
--- a/tools/push-to-trunk/test_scripts.py
+++ b/tools/push-to-trunk/test_scripts.py
@@ -31,10 +31,9 @@
import traceback
import unittest
-import auto_roll
-from auto_roll import CheckLastPush
-from auto_roll import FetchLatestRevision
-from auto_roll import SETTINGS_LOCATION
+import auto_push
+from auto_push import CheckLastPush
+from auto_push import SETTINGS_LOCATION
import common_includes
from common_includes import *
import merge_to_branch
@@ -68,9 +67,8 @@
}
-AUTO_ROLL_ARGS = [
+AUTO_PUSH_ARGS = [
"-a", "author@chromium.org",
- "-c", TEST_CONFIG[CHROMIUM],
"-r", "reviewer@chromium.org",
]
@@ -832,18 +830,20 @@
def testCheckLastPushRecently(self):
self.ExpectGit([
- Git("svn log -1 --oneline", "r101 | Text"),
- Git("svn log -1 --oneline ChangeLog", "r99 | Prepare push to trunk..."),
+ Git(("log -1 --format=%H --grep="
+ "\"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]* (based\" "
+ "svn/trunk"), "hash2\n"),
+ Git("log -1 --format=%s hash2",
+ "Version 3.4.5 (based on bleeding_edge revision r99)\n"),
])
- self.RunStep(auto_roll.AutoRoll, FetchLatestRevision, AUTO_ROLL_ARGS)
- self.assertRaises(Exception, lambda: self.RunStep(auto_roll.AutoRoll,
- CheckLastPush,
- AUTO_ROLL_ARGS))
+ self._state["lkgr"] = "101"
- def testAutoRoll(self):
- password = self.MakeEmptyTempFile()
- TextToFile("PW", password)
+ self.assertRaises(Exception, lambda: self.RunStep(auto_push.AutoPush,
+ CheckLastPush,
+ AUTO_PUSH_ARGS))
+
+ def testAutoPush(self):
TEST_CONFIG[DOT_GIT_LOCATION] = self.MakeEmptyTempFile()
TEST_CONFIG[SETTINGS_LOCATION] = "~/.doesnotexist"
@@ -852,38 +852,30 @@
"{\"message\": \"Tree is throttled\"}"),
URL("https://v8-status.appspot.com/lkgr", Exception("Network problem")),
URL("https://v8-status.appspot.com/lkgr", "100"),
- URL("https://v8-status.appspot.com/status",
- ("username=v8-auto-roll%40chromium.org&"
- "message=Tree+is+closed+%28preparing+to+push%29&password=PW"), ""),
- URL("https://v8-status.appspot.com/status",
- ("username=v8-auto-roll%40chromium.org&"
- "message=Tree+is+throttled&password=PW"), ""),
])
self.ExpectGit([
Git("status -s -uno", ""),
Git("status -s -b -uno", "## some_branch\n"),
Git("svn fetch", ""),
- Git("svn log -1 --oneline", "r100 | Text"),
Git(("log -1 --format=%H --grep=\""
"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]* (based\""
" svn/trunk"), "push_hash\n"),
- Git("svn find-rev push_hash", "65"),
+ Git("log -1 --format=%s push_hash",
+ "Version 3.4.5 (based on bleeding_edge revision r79)\n"),
])
- auto_roll.AutoRoll(TEST_CONFIG, self).Run(
- AUTO_ROLL_ARGS + ["--status-password", password, "--push"])
+ auto_push.AutoPush(TEST_CONFIG, self).Run(AUTO_PUSH_ARGS + ["--push"])
state = json.loads(FileToText("%s-state.json"
% TEST_CONFIG[PERSISTFILE_BASENAME]))
self.assertEquals("100", state["lkgr"])
- self.assertEquals("100", state["latest"])
- def testAutoRollStoppedBySettings(self):
+ def testAutoPushStoppedBySettings(self):
TEST_CONFIG[DOT_GIT_LOCATION] = self.MakeEmptyTempFile()
TEST_CONFIG[SETTINGS_LOCATION] = self.MakeEmptyTempFile()
- TextToFile("{\"enable_auto_roll\": false}", TEST_CONFIG[SETTINGS_LOCATION])
+ TextToFile("{\"enable_auto_push\": false}", TEST_CONFIG[SETTINGS_LOCATION])
self.ExpectReadURL([])
@@ -893,11 +885,11 @@
Git("svn fetch", ""),
])
- def RunAutoRoll():
- auto_roll.AutoRoll(TEST_CONFIG, self).Run(AUTO_ROLL_ARGS)
- self.assertRaises(Exception, RunAutoRoll)
+ def RunAutoPush():
+ auto_push.AutoPush(TEST_CONFIG, self).Run(AUTO_PUSH_ARGS)
+ self.assertRaises(Exception, RunAutoPush)
- def testAutoRollStoppedByTreeStatus(self):
+ def testAutoPushStoppedByTreeStatus(self):
TEST_CONFIG[DOT_GIT_LOCATION] = self.MakeEmptyTempFile()
TEST_CONFIG[SETTINGS_LOCATION] = "~/.doesnotexist"
@@ -912,9 +904,9 @@
Git("svn fetch", ""),
])
- def RunAutoRoll():
- auto_roll.AutoRoll(TEST_CONFIG, self).Run(AUTO_ROLL_ARGS)
- self.assertRaises(Exception, RunAutoRoll)
+ def RunAutoPush():
+ auto_push.AutoPush(TEST_CONFIG, self).Run(AUTO_PUSH_ARGS)
+ self.assertRaises(Exception, RunAutoPush)
def testMergeToBranch(self):
TEST_CONFIG[ALREADY_MERGING_SENTINEL_FILE] = self.MakeEmptyTempFile()
diff --git a/tools/shell-utils.h b/tools/shell-utils.h
new file mode 100644
index 0000000..ac61fb6
--- /dev/null
+++ b/tools/shell-utils.h
@@ -0,0 +1,67 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Utility functions used by parser-shell and lexer-shell.
+
+#include <stdio.h>
+
+namespace v8 {
+namespace internal {
+
+enum Encoding {
+ LATIN1,
+ UTF8,
+ UTF16
+};
+
+const byte* ReadFileAndRepeat(const char* name, int* size, int repeat) {
+ FILE* file = fopen(name, "rb");
+ *size = 0;
+ if (file == NULL) return NULL;
+
+ fseek(file, 0, SEEK_END);
+ int file_size = ftell(file);
+ rewind(file);
+
+ *size = file_size * repeat;
+
+ byte* chars = new byte[*size + 1];
+ for (int i = 0; i < file_size;) {
+ int read = static_cast<int>(fread(&chars[i], 1, file_size - i, file));
+ i += read;
+ }
+ fclose(file);
+
+ for (int i = file_size; i < *size; i++) {
+ chars[i] = chars[i - file_size];
+ }
+ chars[*size] = 0;
+
+ return chars;
+}
+
+} } // namespace v8::internal