Version 3.20.15
Rollback to 3.20.12.1, the last known "good" V8 in preparation for the Chrome
M30 branch, plus the TypedArray API and correctness patches r16033 and r16084.
R=jkummerow@chromium.org
Review URL: https://codereview.chromium.org/22715004
git-svn-id: http://v8.googlecode.com/svn/trunk@16140 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
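For context, the "TypedArray API" retained by this rollback is the embedder-facing one in include/v8.h (v8::ArrayBuffer plus the typed-array views). A minimal usage sketch, assuming the 3.20-era signatures (ArrayBuffer::New and the view constructors gained an Isolate parameter in later releases, so treat the exact calls as assumptions):

    #include "v8.h"

    // Sketch of the embedder-facing TypedArray API kept by this rollback
    // (assumed 3.20-era signatures).
    void CreateByteView(v8::Isolate* isolate) {
      v8::HandleScope scope(isolate);
      // 1 KiB backing store owned by the VM.
      v8::Local<v8::ArrayBuffer> buffer = v8::ArrayBuffer::New(1024);
      // Unsigned-byte view over the whole buffer.
      v8::Local<v8::Uint8Array> bytes = v8::Uint8Array::New(buffer, 0, 1024);
      (void)bytes;
    }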
diff --git a/src/api.cc b/src/api.cc
index e04fbef..b80d1be 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -46,7 +46,6 @@
#include "heap-profiler.h"
#include "heap-snapshot-generator-inl.h"
#include "icu_util.h"
-#include "json-parser.h"
#include "messages.h"
#ifdef COMPRESS_STARTUP_DATA_BZ2
#include "natives.h"
@@ -399,7 +398,7 @@
kSnapshotContext,
kLibraries,
kExperimentalLibraries,
-#if defined(V8_I18N_SUPPORT)
+#if defined(ENABLE_I18N_SUPPORT)
kI18NExtension,
#endif
kCompressedStartupDataCount
@@ -443,7 +442,7 @@
compressed_data[kExperimentalLibraries].raw_size =
i::ExperimentalNatives::GetRawScriptsSize();
-#if defined(V8_I18N_SUPPORT)
+#if defined(ENABLE_I18N_SUPPORT)
i::Vector<const i::byte> i18n_extension_source =
i::I18NNatives::GetScriptsSource();
compressed_data[kI18NExtension].data =
@@ -483,7 +482,7 @@
decompressed_data[kExperimentalLibraries].raw_size);
i::ExperimentalNatives::SetRawScriptsSource(exp_libraries_source);
-#if defined(V8_I18N_SUPPORT)
+#if defined(ENABLE_I18N_SUPPORT)
ASSERT_EQ(i::I18NNatives::GetRawScriptsSize(),
decompressed_data[kI18NExtension].raw_size);
i::Vector<const char> i18n_extension_source(
@@ -676,16 +675,6 @@
}
-int V8::Eternalize(i::Isolate* isolate, i::Object** handle) {
- return isolate->eternal_handles()->Create(isolate, *handle);
-}
-
-
-i::Object** V8::GetEternal(i::Isolate* isolate, int index) {
- return isolate->eternal_handles()->Get(index).location();
-}
-
-
// --- H a n d l e s ---
@@ -2618,29 +2607,6 @@
}
-// --- J S O N ---
-
-Local<Object> JSON::Parse(Local<String> json_string) {
- i::Isolate* isolate = i::Isolate::Current();
- EnsureInitializedForIsolate(isolate, "v8::JSON::Parse");
- ENTER_V8(isolate);
- i::HandleScope scope(isolate);
- i::Handle<i::String> source = i::Handle<i::String>(
- FlattenGetString(Utils::OpenHandle(*json_string)));
- EXCEPTION_PREAMBLE(isolate);
- i::Handle<i::Object> result;
- if (source->IsSeqOneByteString()) {
- result = i::JsonParser<true>::Parse(source);
- } else {
- result = i::JsonParser<false>::Parse(source);
- }
- has_pending_exception = result.is_null();
- EXCEPTION_BAILOUT_CHECK(isolate, Local<Object>());
- return Utils::ToLocal(
- i::Handle<i::JSObject>::cast(scope.CloseAndEscape(result)));
-}
-
-
// --- D a t a ---
bool Value::FullIsUndefined() const {
@@ -7601,18 +7567,6 @@
}
-int64_t CpuProfile::GetStartTime() const {
- const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this);
- return profile->start_time_us();
-}
-
-
-int64_t CpuProfile::GetEndTime() const {
- const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this);
- return profile->end_time_us();
-}
-
-
int CpuProfile::GetSamplesCount() const {
return reinterpret_cast<const i::CpuProfile*>(this)->samples_count();
}
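The api.cc hunks above also revert the embedder-facing JSON API along with the CpuProfile start/end-time accessors and the eternal-handle helpers. For reference, a sketch of how an embedder called the removed entry point, assuming the 3.20-era string constructor (String::New rather than the later String::NewFromUtf8):

    // Sketch only: the v8::JSON::Parse entry point removed above.
    void ParseJson(v8::Isolate* isolate) {
      v8::HandleScope scope(isolate);
      v8::Local<v8::String> json = v8::String::New("{\"answer\": 42}");
      // On malformed input the handle comes back empty and the exception is
      // left pending (observable through v8::TryCatch).
      v8::Local<v8::Object> obj = v8::JSON::Parse(json);
      (void)obj;
    }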
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 5f3a999..eff47e2 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -119,9 +119,9 @@
// Initial map for the builtin InternalArray functions should be maps.
__ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
__ SmiTst(r2);
- __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
+ __ Assert(ne, "Unexpected initial map for InternalArray function");
__ CompareObjectType(r2, r3, r4, MAP_TYPE);
- __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
+ __ Assert(eq, "Unexpected initial map for InternalArray function");
}
// Run the native code for the InternalArray function called as a normal
@@ -147,9 +147,9 @@
// Initial map for the builtin Array functions should be maps.
__ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
__ SmiTst(r2);
- __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(ne, "Unexpected initial map for Array function");
__ CompareObjectType(r2, r3, r4, MAP_TYPE);
- __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(eq, "Unexpected initial map for Array function");
}
// Run the native code for the Array function called as a normal function.
@@ -178,7 +178,7 @@
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
__ cmp(function, Operand(r2));
- __ Assert(eq, kUnexpectedStringFunction);
+ __ Assert(eq, "Unexpected String function");
}
// Load the first arguments in r0 and get rid of the rest.
@@ -224,10 +224,10 @@
if (FLAG_debug_code) {
__ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
__ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
- __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
+ __ Assert(eq, "Unexpected string wrapper instance size");
__ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
__ cmp(r4, Operand::Zero());
- __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
+ __ Assert(eq, "Unexpected unused properties of string wrapper");
}
__ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));
@@ -471,7 +471,7 @@
// r0: offset of first field after pre-allocated fields
if (FLAG_debug_code) {
__ cmp(r0, r6);
- __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
+ __ Assert(le, "Unexpected number of pre-allocated property fields.");
}
__ InitializeFieldsWithFiller(r5, r0, r7);
// To allow for truncation.
@@ -503,7 +503,7 @@
// Done if no extra properties are to be allocated.
__ b(eq, &allocated);
- __ Assert(pl, kPropertyAllocationCountFailed);
+ __ Assert(pl, "Property allocation count failed.");
// Scale the number of elements by pointer size and add the header for
// FixedArrays to the start of the next object calculation from above.
@@ -547,7 +547,7 @@
} else if (FLAG_debug_code) {
__ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
__ cmp(r7, r8);
- __ Assert(eq, kUndefinedValueNotLoaded);
+ __ Assert(eq, "Undefined value not loaded.");
}
__ b(&entry);
__ bind(&loop);
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 98a835f..ba98b96 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -246,6 +246,17 @@
}
+void UnaryOpStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { r0 };
+ descriptor->register_param_count_ = 1;
+ descriptor->register_params_ = registers;
+ descriptor->deoptimization_handler_ =
+ FUNCTION_ADDR(UnaryOpIC_Miss);
+}
+
+
void StoreGlobalStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -509,8 +520,9 @@
Label after_sentinel;
__ JumpIfNotSmi(r3, &after_sentinel);
if (FLAG_debug_code) {
+ const char* message = "Expected 0 as a Smi sentinel";
__ cmp(r3, Operand::Zero());
- __ Assert(eq, kExpected0AsASmiSentinel);
+ __ Assert(eq, message);
}
__ ldr(r3, GlobalObjectOperand());
__ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
@@ -3905,9 +3917,9 @@
__ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset));
if (FLAG_debug_code) {
__ SmiTst(regexp_data);
- __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
+ __ Check(ne, "Unexpected type for RegExp data, FixedArray expected");
__ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE);
- __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
+ __ Check(eq, "Unexpected type for RegExp data, FixedArray expected");
}
// regexp_data: RegExp data (FixedArray)
@@ -4249,7 +4261,7 @@
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ tst(r0, Operand(kIsIndirectStringMask));
- __ Assert(eq, kExternalStringExpectedButNotFound);
+ __ Assert(eq, "external string expected, but not found");
}
__ ldr(subject,
FieldMemOperand(subject, ExternalString::kResourceDataOffset));
@@ -4631,7 +4643,7 @@
void StringCharCodeAtGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
+ __ Abort("Unexpected fallthrough to CharCodeAt slow case");
// Index is not a smi.
__ bind(&index_not_smi_);
@@ -4676,7 +4688,7 @@
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
+ __ Abort("Unexpected fallthrough from CharCodeAt slow case");
}
@@ -4706,7 +4718,7 @@
void StringCharFromCodeGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
+ __ Abort("Unexpected fallthrough to CharFromCode slow case");
__ bind(&slow_case_);
call_helper.BeforeCall(masm);
@@ -4716,7 +4728,7 @@
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
+ __ Abort("Unexpected fallthrough from CharFromCode slow case");
}
@@ -4773,7 +4785,7 @@
// Check that destination is actually word aligned if the flag says
// that it is.
__ tst(dest, Operand(kPointerAlignmentMask));
- __ Check(eq, kDestinationOfCopyNotAligned);
+ __ Check(eq, "Destination of copy not aligned.");
}
const int kReadAlignment = 4;
@@ -5002,7 +5014,7 @@
if (FLAG_debug_code) {
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(ip, candidate);
- __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole);
+ __ Assert(eq, "oddball in string table is not undefined or the hole");
}
__ jmp(&next_probe[i]);
@@ -6900,7 +6912,7 @@
}
// If we reached this point there is a problem.
- __ Abort(kUnexpectedElementsKindInArrayConstructor);
+ __ Abort("Unexpected ElementsKind in array constructor");
}
@@ -6957,7 +6969,7 @@
}
// If we reached this point there is a problem.
- __ Abort(kUnexpectedElementsKindInArrayConstructor);
+ __ Abort("Unexpected ElementsKind in array constructor");
}
@@ -7018,9 +7030,9 @@
__ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ tst(r3, Operand(kSmiTagMask));
- __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(ne, "Unexpected initial map for Array function");
__ CompareObjectType(r3, r3, r4, MAP_TYPE);
- __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(eq, "Unexpected initial map for Array function");
// We should either have undefined in ebx or a valid cell
Label okay_here;
@@ -7029,7 +7041,7 @@
__ b(eq, &okay_here);
__ ldr(r3, FieldMemOperand(r2, 0));
__ cmp(r3, Operand(cell_map));
- __ Assert(eq, kExpectedPropertyCellInRegisterEbx);
+ __ Assert(eq, "Expected property cell in register ebx");
__ bind(&okay_here);
}
@@ -7132,9 +7144,9 @@
__ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ tst(r3, Operand(kSmiTagMask));
- __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(ne, "Unexpected initial map for Array function");
__ CompareObjectType(r3, r3, r4, MAP_TYPE);
- __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(eq, "Unexpected initial map for Array function");
}
// Figure out the right elements kind
@@ -7151,7 +7163,7 @@
__ b(eq, &done);
__ cmp(r3, Operand(FAST_HOLEY_ELEMENTS));
__ Assert(eq,
- kInvalidElementsKindForInternalArrayOrInternalPackedArray);
+ "Invalid ElementsKind for InternalArray or InternalPackedArray");
__ bind(&done);
}
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 1bcf3e3..7559373 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -532,7 +532,7 @@
__ SmiTag(r9);
__ orr(r9, r9, Operand(1));
__ CompareRoot(r9, Heap::kTheHoleValueRootIndex);
- __ Assert(eq, kObjectFoundInSmiOnlyArray);
+ __ Assert(eq, "object found in smi-only array");
}
__ Strd(r4, r5, MemOperand(r7, 8, PostIndex));
@@ -728,7 +728,7 @@
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ tst(result, Operand(kIsIndirectStringMask));
- __ Assert(eq, kExternalStringExpectedButNotFound);
+ __ Assert(eq, "external string expected, but not found");
}
// Rule out short external strings.
STATIC_CHECK(kShortExternalStringTag != 0);
diff --git a/src/arm/debug-arm.cc b/src/arm/debug-arm.cc
index 108435f..7faea08 100644
--- a/src/arm/debug-arm.cc
+++ b/src/arm/debug-arm.cc
@@ -130,7 +130,7 @@
if ((non_object_regs & (1 << r)) != 0) {
if (FLAG_debug_code) {
__ tst(reg, Operand(0xc0000000));
- __ Assert(eq, kUnableToEncodeValueAsSmi);
+ __ Assert(eq, "Unable to encode value as smi");
}
__ SmiTag(reg);
}
@@ -313,12 +313,12 @@
void Debug::GeneratePlainReturnLiveEdit(MacroAssembler* masm) {
- masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnArm);
+ masm->Abort("LiveEdit frame dropping is not supported on arm");
}
void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
- masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnArm);
+ masm->Abort("LiveEdit frame dropping is not supported on arm");
}
const bool Debug::kFrameDropperSupported = false;
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index b73006a..ea7b73f 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -786,9 +786,9 @@
// Check that we're not inside a with or catch context.
__ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
__ CompareRoot(r1, Heap::kWithContextMapRootIndex);
- __ Check(ne, kDeclarationInWithContext);
+ __ Check(ne, "Declaration in with context.");
__ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
- __ Check(ne, kDeclarationInCatchContext);
+ __ Check(ne, "Declaration in catch context.");
}
}
@@ -2512,7 +2512,7 @@
// Check for an uninitialized let binding.
__ ldr(r2, location);
__ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
- __ Check(eq, kLetBindingReInitialization);
+ __ Check(eq, "Let binding re-initialization.");
}
// Perform the assignment.
__ str(r0, location);
@@ -3473,23 +3473,23 @@
Register value,
uint32_t encoding_mask) {
__ SmiTst(index);
- __ Check(eq, kNonSmiIndex);
+ __ Check(eq, "Non-smi index");
__ SmiTst(value);
- __ Check(eq, kNonSmiValue);
+ __ Check(eq, "Non-smi value");
__ ldr(ip, FieldMemOperand(string, String::kLengthOffset));
__ cmp(index, ip);
- __ Check(lt, kIndexIsTooLarge);
+ __ Check(lt, "Index is too large");
__ cmp(index, Operand(Smi::FromInt(0)));
- __ Check(ge, kIndexIsNegative);
+ __ Check(ge, "Index is negative");
__ ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
__ ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));
__ and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
__ cmp(ip, Operand(encoding_mask));
- __ Check(eq, kUnexpectedStringType);
+ __ Check(eq, "Unexpected string type");
}
@@ -3849,7 +3849,7 @@
Handle<FixedArray> jsfunction_result_caches(
isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
- __ Abort(kAttemptToUseUndefinedCache);
+ __ Abort("Attempt to use undefined cache.");
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
context()->Plug(r0);
return;
@@ -4030,7 +4030,7 @@
// elements_end: Array end.
if (generate_debug_code_) {
__ cmp(array_length, Operand::Zero());
- __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
+ __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
}
__ bind(&loop);
__ ldr(string, MemOperand(element, kPointerSize, PostIndex));
@@ -4349,12 +4349,35 @@
break;
}
+ case Token::SUB:
+ EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
+ break;
+
+ case Token::BIT_NOT:
+ EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
+ break;
+
default:
UNREACHABLE();
}
}
+void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
+ const char* comment) {
+ // TODO(svenpanne): Allowing format strings in Comment would be nice here...
+ Comment cmt(masm_, comment);
+ UnaryOpStub stub(expr->op());
+ // UnaryOpStub expects the argument to be in the
+ // accumulator register r0.
+ VisitForAccumulatorValue(expr->expression());
+ SetSourcePosition(expr->position());
+ CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
+ expr->UnaryOperationFeedbackId());
+ context()->Plug(r0);
+}
+
+
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index 43f0fd3..1857b4a 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -437,7 +437,7 @@
}
-void LChunkBuilder::Abort(BailoutReason reason) {
+void LChunkBuilder::Abort(const char* reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -645,7 +645,7 @@
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
- Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
+ Abort("Out of virtual registers while trying to allocate temp register.");
vreg = 0;
}
operand->set_virtual_register(vreg);
@@ -1325,6 +1325,15 @@
}
+LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
+ ASSERT(instr->value()->representation().IsInteger32());
+ ASSERT(instr->representation().IsInteger32());
+ if (instr->HasNoUses()) return NULL;
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new(zone()) LBitNotI(value));
+}
+
+
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::DIV, instr);
@@ -1823,6 +1832,17 @@
}
+LInstruction* LChunkBuilder::DoNumericConstraint(HNumericConstraint* instr) {
+ return NULL;
+}
+
+
+LInstruction* LChunkBuilder::DoInductionVariableAnnotation(
+ HInductionVariableAnnotation* instr) {
+ return NULL;
+}
+
+
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
LOperand* value = UseRegisterOrConstantAtStart(instr->index());
LOperand* length = UseRegister(instr->length());
@@ -1996,6 +2016,19 @@
}
+LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
+ LUnallocated* temp1 = NULL;
+ LOperand* temp2 = NULL;
+ if (!instr->CanOmitPrototypeChecks()) {
+ temp1 = TempRegister();
+ temp2 = TempRegister();
+ }
+ LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
+ if (instr->CanOmitPrototypeChecks()) return result;
+ return AssignEnvironment(result);
+}
+
+
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
return AssignEnvironment(new(zone()) LCheckFunction(value));
@@ -2004,16 +2037,10 @@
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) {
- value = UseRegisterAtStart(instr->value());
- if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
- }
- LCheckMaps* result = new(zone()) LCheckMaps(value);
- if (!instr->CanOmitMapChecks()) {
- AssignEnvironment(result);
- if (instr->has_migration_target()) return AssignPointerMap(result);
- }
- return result;
+ if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
+ LInstruction* result = new(zone()) LCheckMaps(value);
+ if (instr->CanOmitMapChecks()) return result;
+ return AssignEnvironment(result);
}
@@ -2295,7 +2322,7 @@
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool is_in_object = instr->access().IsInobject();
bool needs_write_barrier = instr->NeedsWriteBarrier();
- bool needs_write_barrier_for_map = instr->has_transition() &&
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
instr->NeedsWriteBarrierForMap();
LOperand* obj;
@@ -2415,7 +2442,7 @@
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
- Abort(kTooManySpillSlotsNeededForOSR);
+ Abort("Too many spill slots needed for OSR");
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
diff --git a/src/arm/lithium-arm.h b/src/arm/lithium-arm.h
index 7ce907a..c568ad6 100644
--- a/src/arm/lithium-arm.h
+++ b/src/arm/lithium-arm.h
@@ -50,6 +50,7 @@
V(ArithmeticD) \
V(ArithmeticT) \
V(BitI) \
+ V(BitNotI) \
V(BoundsCheck) \
V(Branch) \
V(CallConstantFunction) \
@@ -67,6 +68,7 @@
V(CheckNonSmi) \
V(CheckMaps) \
V(CheckMapValue) \
+ V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampDToUint8) \
V(ClampIToUint8) \
@@ -1376,6 +1378,18 @@
};
+class LBitNotI: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LBitNotI(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
+};
+
+
class LAddI: public LTemplateInstruction<1, 2, 0> {
public:
LAddI(LOperand* left, LOperand* right) {
@@ -2136,7 +2150,7 @@
virtual void PrintDataTo(StringStream* stream);
- Handle<Map> transition() const { return hydrogen()->transition_map(); }
+ Handle<Map> transition() const { return hydrogen()->transition(); }
Representation representation() const {
return hydrogen()->field_representation();
}
@@ -2338,6 +2352,26 @@
};
+class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 2> {
+ public:
+ LCheckPrototypeMaps(LOperand* temp, LOperand* temp2) {
+ temps_[0] = temp;
+ temps_[1] = temp2;
+ }
+
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
+
+ ZoneList<Handle<JSObject> >* prototypes() const {
+ return hydrogen()->prototypes();
+ }
+ ZoneList<Handle<Map> >* maps() const { return hydrogen()->maps(); }
+};
+
+
class LCheckSmi: public LTemplateInstruction<1, 1, 0> {
public:
explicit LCheckSmi(LOperand* value) {
@@ -2636,7 +2670,7 @@
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(BailoutReason reason);
+ void Abort(const char* reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 0b704d0..cf1e7c7 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -91,7 +91,7 @@
}
-void LCodeGen::Abort(BailoutReason reason) {
+void LCodeGen::Abort(const char* reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -334,7 +334,7 @@
// 32bit data after it.
if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
deopt_jump_table_.length() * 7)) {
- Abort(kGeneratedCodeIsTooLarge);
+ Abort("Generated code is too large");
}
if (deopt_jump_table_.length() > 0) {
@@ -423,7 +423,7 @@
ASSERT(literal->IsNumber());
__ mov(scratch, Operand(static_cast<int32_t>(literal->Number())));
} else if (r.IsDouble()) {
- Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
+ Abort("EmitLoadRegister: Unsupported double immediate.");
} else {
ASSERT(r.IsTagged());
__ LoadObject(scratch, literal);
@@ -461,9 +461,9 @@
__ vcvt_f64_s32(dbl_scratch, flt_scratch);
return dbl_scratch;
} else if (r.IsDouble()) {
- Abort(kUnsupportedDoubleImmediate);
+ Abort("unsupported double immediate");
} else if (r.IsTagged()) {
- Abort(kUnsupportedTaggedImmediate);
+ Abort("unsupported tagged immediate");
}
} else if (op->IsStackSlot() || op->IsArgument()) {
// TODO(regis): Why is vldr not taking a MemOperand?
@@ -534,14 +534,14 @@
ASSERT(constant->HasInteger32Value());
return Operand(constant->Integer32Value());
} else if (r.IsDouble()) {
- Abort(kToOperandUnsupportedDoubleImmediate);
+ Abort("ToOperand Unsupported double immediate.");
}
ASSERT(r.IsTagged());
return Operand(constant->handle());
} else if (op->IsRegister()) {
return Operand(ToRegister(op));
} else if (op->IsDoubleRegister()) {
- Abort(kToOperandIsDoubleRegisterUnimplemented);
+ Abort("ToOperand IsDoubleRegister unimplemented");
return Operand::Zero();
}
// Stack slots not implemented, use ToMemOperand instead.
@@ -772,7 +772,7 @@
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
- Abort(kBailoutWasNotPrepared);
+ Abort("bailout was not prepared");
return;
}
@@ -1669,11 +1669,7 @@
__ orr(result, left, right);
break;
case Token::BIT_XOR:
- if (right_op->IsConstantOperand() && right.immediate() == int32_t(~0)) {
- __ mvn(result, Operand(left));
- } else {
- __ eor(result, left, right);
- }
+ __ eor(result, left, right);
break;
default:
UNREACHABLE();
@@ -1940,7 +1936,7 @@
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ cmp(ip, Operand(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
- __ Check(eq, kUnexpectedStringType);
+ __ Check(eq, "Unexpected string type");
}
__ add(ip,
@@ -1957,6 +1953,13 @@
}
+void LCodeGen::DoBitNotI(LBitNotI* instr) {
+ Register input = ToRegister(instr->value());
+ Register result = ToRegister(instr->result());
+ __ mvn(result, Operand(input));
+}
+
+
void LCodeGen::DoThrow(LThrow* instr) {
Register input_reg = EmitLoadRegister(instr->value(), ip);
__ push(input_reg);
@@ -3197,7 +3200,7 @@
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -3281,7 +3284,7 @@
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -3542,7 +3545,7 @@
void LCodeGen::DoPushArgument(LPushArgument* instr) {
LOperand* argument = instr->value();
if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
- Abort(kDoPushArgumentNotImplementedForDoubleType);
+ Abort("DoPushArgument not implemented for double type.");
} else {
Register argument_reg = EmitLoadRegister(argument, ip);
__ push(argument_reg);
@@ -3762,7 +3765,7 @@
DwVfpRegister input = ToDoubleRegister(instr->value());
DwVfpRegister result = ToDoubleRegister(instr->result());
__ vabs(result, input);
- } else if (r.IsSmiOrInteger32()) {
+ } else if (r.IsInteger32()) {
EmitIntegerMathAbs(instr);
} else {
// Representation is tagged.
@@ -4316,7 +4319,7 @@
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -4389,7 +4392,7 @@
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -4412,7 +4415,7 @@
if (masm()->emit_debug_code()) {
__ vmrs(ip);
__ tst(ip, Operand(kVFPDefaultNaNModeControlBit));
- __ Assert(ne, kDefaultNaNModeNotSet);
+ __ Assert(ne, "Default NaN mode not set");
}
__ VFPCanonicalizeNaN(value);
}
@@ -5211,67 +5214,33 @@
}
-void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
- {
- PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
- __ push(object);
- CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
- __ StoreToSafepointRegisterSlot(r0, scratch0());
- }
- __ tst(scratch0(), Operand(kSmiTagMask));
- DeoptimizeIf(eq, instr->environment());
+void LCodeGen::DoCheckMapCommon(Register map_reg,
+ Handle<Map> map,
+ LEnvironment* env) {
+ Label success;
+ __ CompareMap(map_reg, map, &success);
+ DeoptimizeIf(ne, env);
+ __ bind(&success);
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
- class DeferredCheckMaps: public LDeferredCode {
- public:
- DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
- : LDeferredCode(codegen), instr_(instr), object_(object) {
- SetExit(check_maps());
- }
- virtual void Generate() {
- codegen()->DoDeferredInstanceMigration(instr_, object_);
- }
- Label* check_maps() { return &check_maps_; }
- virtual LInstruction* instr() { return instr_; }
- private:
- LCheckMaps* instr_;
- Label check_maps_;
- Register object_;
- };
-
if (instr->hydrogen()->CanOmitMapChecks()) return;
Register map_reg = scratch0();
-
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
+ Label success;
SmallMapList* map_set = instr->hydrogen()->map_set();
__ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
-
- DeferredCheckMaps* deferred = NULL;
- if (instr->hydrogen()->has_migration_target()) {
- deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
- __ bind(deferred->check_maps());
- }
-
- Label success;
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMap(map_reg, map, &success);
__ b(eq, &success);
}
-
Handle<Map> map = map_set->last();
- __ CompareMap(map_reg, map, &success);
- if (instr->hydrogen()->has_migration_target()) {
- __ b(ne, deferred->entry());
- } else {
- DeoptimizeIf(ne, instr->environment());
- }
-
+ DoCheckMapCommon(map_reg, map, instr->environment());
__ bind(&success);
}
@@ -5326,6 +5295,25 @@
}
+void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
+ if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
+
+ Register prototype_reg = ToRegister(instr->temp());
+ Register map_reg = ToRegister(instr->temp2());
+
+ ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
+ ZoneList<Handle<Map> >* maps = instr->maps();
+
+ ASSERT(prototypes->length() == maps->length());
+
+ for (int i = 0; i < prototypes->length(); i++) {
+ __ LoadHeapObject(prototype_reg, prototypes->at(i));
+ __ ldr(map_reg, FieldMemOperand(prototype_reg, HeapObject::kMapOffset));
+ DoCheckMapCommon(map_reg, maps->at(i), instr->environment());
+ }
+}
+
+
void LCodeGen::DoAllocate(LAllocate* instr) {
class DeferredAllocate: public LDeferredCode {
public:
diff --git a/src/arm/lithium-codegen-arm.h b/src/arm/lithium-codegen-arm.h
index 143109c..21f7921 100644
--- a/src/arm/lithium-codegen-arm.h
+++ b/src/arm/lithium-codegen-arm.h
@@ -115,7 +115,7 @@
DwVfpRegister EmitLoadDoubleRegister(LOperand* op,
SwVfpRegister flt_scratch,
DwVfpRegister dbl_scratch);
- int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
+ int ToRepresentation(LConstantOperand* op, const Representation& r) const;
int32_t ToInteger32(LConstantOperand* op) const;
Smi* ToSmi(LConstantOperand* op) const;
double ToDouble(LConstantOperand* op) const;
@@ -154,7 +154,8 @@
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
- void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
+
+ void DoCheckMapCommon(Register map_reg, Handle<Map> map, LEnvironment* env);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -213,7 +214,7 @@
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- void Abort(BailoutReason reason);
+ void Abort(const char* reason);
void FPRINTF_CHECKING Comment(const char* format, ...);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
diff --git a/src/arm/lithium-gap-resolver-arm.cc b/src/arm/lithium-gap-resolver-arm.cc
index 88ac7a2..7a3c968 100644
--- a/src/arm/lithium-gap-resolver-arm.cc
+++ b/src/arm/lithium-gap-resolver-arm.cc
@@ -254,7 +254,7 @@
} else {
__ LoadObject(dst, cgen_->ToHandle(constant_source));
}
- } else if (destination->IsDoubleRegister()) {
+ } else if (source->IsDoubleRegister()) {
DwVfpRegister result = cgen_->ToDoubleRegister(destination);
double v = cgen_->ToDouble(constant_source);
__ Vmov(result, v, ip);
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index b9728ed..cd12461 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -489,7 +489,7 @@
if (emit_debug_code()) {
ldr(ip, MemOperand(address));
cmp(ip, value);
- Check(eq, kWrongAddressOrValuePassedToRecordWrite);
+ Check(eq, "Wrong address or value passed to RecordWrite");
}
Label done;
@@ -1490,7 +1490,7 @@
// In debug mode, make sure the lexical context is set.
#ifdef DEBUG
cmp(scratch, Operand::Zero());
- Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
+ Check(ne, "we should not have an empty lexical context");
#endif
// Load the native context of the current context.
@@ -1508,7 +1508,7 @@
ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
LoadRoot(ip, Heap::kNativeContextMapRootIndex);
cmp(holder_reg, ip);
- Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
+ Check(eq, "JSGlobalObject::native_context should be a native context.");
pop(holder_reg); // Restore holder.
}
@@ -1525,12 +1525,12 @@
mov(holder_reg, ip); // Move ip to its holding place.
LoadRoot(ip, Heap::kNullValueRootIndex);
cmp(holder_reg, ip);
- Check(ne, kJSGlobalProxyContextShouldNotBeNull);
+ Check(ne, "JSGlobalProxy::context() should not be null.");
ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
LoadRoot(ip, Heap::kNativeContextMapRootIndex);
cmp(holder_reg, ip);
- Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
+ Check(eq, "JSGlobalObject::native_context should be a native context.");
// Restore ip is not needed. ip is reloaded below.
pop(holder_reg); // Restore holder.
// Restore ip to holder's context.
@@ -1727,7 +1727,7 @@
// respect to register content between debug and release mode.
ldr(ip, MemOperand(topaddr));
cmp(result, ip);
- Check(eq, kUnexpectedAllocationTop);
+ Check(eq, "Unexpected allocation top");
}
// Load allocation limit into ip. Result already contains allocation top.
ldr(ip, MemOperand(topaddr, limit - top));
@@ -1825,7 +1825,7 @@
// respect to register content between debug and release mode.
ldr(ip, MemOperand(topaddr));
cmp(result, ip);
- Check(eq, kUnexpectedAllocationTop);
+ Check(eq, "Unexpected allocation top");
}
// Load allocation limit into ip. Result already contains allocation top.
ldr(ip, MemOperand(topaddr, limit - top));
@@ -1859,7 +1859,7 @@
// Update allocation top. result temporarily holds the new top.
if (emit_debug_code()) {
tst(scratch2, Operand(kObjectAlignmentMask));
- Check(eq, kUnalignedAllocationInNewSpace);
+ Check(eq, "Unaligned allocation in new space");
}
str(scratch2, MemOperand(topaddr));
@@ -1882,7 +1882,7 @@
mov(scratch, Operand(new_space_allocation_top));
ldr(scratch, MemOperand(scratch));
cmp(object, scratch);
- Check(lt, kUndoAllocationOfNonAllocatedMemory);
+ Check(lt, "Undo allocation of non allocated memory");
#endif
// Write the address of the object to un-allocate as the current top.
mov(scratch, Operand(new_space_allocation_top));
@@ -2131,7 +2131,7 @@
if (emit_debug_code()) {
vmrs(ip);
tst(ip, Operand(kVFPDefaultNaNModeControlBit));
- Assert(ne, kDefaultNaNModeNotSet);
+ Assert(ne, "Default NaN mode not set");
}
VFPCanonicalizeNaN(double_scratch);
b(&store);
@@ -2381,7 +2381,7 @@
if (emit_debug_code()) {
ldr(r1, MemOperand(r7, kLevelOffset));
cmp(r1, r6);
- Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
+ Check(eq, "Unexpected level after return from api call");
}
sub(r6, r6, Operand(1));
str(r6, MemOperand(r7, kLevelOffset));
@@ -2782,9 +2782,9 @@
}
-void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
+void MacroAssembler::Assert(Condition cond, const char* msg) {
if (emit_debug_code())
- Check(cond, reason);
+ Check(cond, msg);
}
@@ -2803,23 +2803,23 @@
LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
cmp(elements, ip);
b(eq, &ok);
- Abort(kJSObjectWithFastElementsMapHasSlowElements);
+ Abort("JSObject with fast elements map has slow elements");
bind(&ok);
pop(elements);
}
}
-void MacroAssembler::Check(Condition cond, BailoutReason reason) {
+void MacroAssembler::Check(Condition cond, const char* msg) {
Label L;
b(cond, &L);
- Abort(reason);
+ Abort(msg);
// will not return here
bind(&L);
}
-void MacroAssembler::Abort(BailoutReason reason) {
+void MacroAssembler::Abort(const char* msg) {
Label abort_start;
bind(&abort_start);
// We want to pass the msg string like a smi to avoid GC
@@ -2827,7 +2827,6 @@
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
- const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
@@ -2970,7 +2969,7 @@
CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
b(&ok);
bind(&fail);
- Abort(kGlobalFunctionsMustHaveInitialMap);
+ Abort("Global functions must have initial map");
bind(&ok);
}
}
@@ -3039,7 +3038,7 @@
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
tst(object, Operand(kSmiTagMask));
- Check(ne, kOperandIsASmi);
+ Check(ne, "Operand is a smi");
}
}
@@ -3048,7 +3047,7 @@
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
tst(object, Operand(kSmiTagMask));
- Check(eq, kOperandIsNotSmi);
+ Check(eq, "Operand is not smi");
}
}
@@ -3057,12 +3056,12 @@
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
tst(object, Operand(kSmiTagMask));
- Check(ne, kOperandIsASmiAndNotAString);
+ Check(ne, "Operand is a smi and not a string");
push(object);
ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
pop(object);
- Check(lo, kOperandIsNotAString);
+ Check(lo, "Operand is not a string");
}
}
@@ -3071,12 +3070,12 @@
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
tst(object, Operand(kSmiTagMask));
- Check(ne, kOperandIsASmiAndNotAName);
+ Check(ne, "Operand is a smi and not a name");
push(object);
ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
CompareInstanceType(object, object, LAST_NAME_TYPE);
pop(object);
- Check(le, kOperandIsNotAName);
+ Check(le, "Operand is not a name");
}
}
@@ -3085,7 +3084,7 @@
void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
if (emit_debug_code()) {
CompareRoot(reg, index);
- Check(eq, kHeapNumberMapRegisterClobbered);
+ Check(eq, "HeapNumberMap register clobbered.");
}
}
@@ -3231,7 +3230,7 @@
bind(&word_loop);
if (emit_debug_code()) {
tst(src, Operand(kPointerSize - 1));
- Assert(eq, kExpectingAlignmentForCopyBytes);
+ Assert(eq, "Expecting alignment for CopyBytes");
}
cmp(length, Operand(kPointerSize));
b(lt, &byte_loop);
@@ -3495,7 +3494,7 @@
// Check that the instruction is a ldr reg, [pc + offset] .
and_(result, result, Operand(kLdrPCPattern));
cmp(result, Operand(kLdrPCPattern));
- Check(eq, kTheInstructionToPatchShouldBeALoadFromPc);
+ Check(eq, "The instruction to patch should be a load from pc.");
// Result was clobbered. Restore it.
ldr(result, MemOperand(ldr_location));
}
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index 8b9fa2b..38308e5 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -144,8 +144,6 @@
Condition cond = al);
void Call(Label* target);
- void Push(Register src) { push(src); }
- void Pop(Register dst) { pop(dst); }
// Register move. May do nothing if the registers are identical.
void Move(Register dst, Handle<Object> value);
@@ -1138,14 +1136,14 @@
// Calls Abort(msg) if the condition cond is not satisfied.
// Use --debug_code to enable.
- void Assert(Condition cond, BailoutReason reason);
+ void Assert(Condition cond, const char* msg);
void AssertFastElements(Register elements);
// Like Assert(), but always enabled.
- void Check(Condition cond, BailoutReason reason);
+ void Check(Condition cond, const char* msg);
// Print a message to stdout and abort execution.
- void Abort(BailoutReason msg);
+ void Abort(const char* msg);
// Verify restrictions about code generated in stubs.
void set_generating_stub(bool value) { generating_stub_ = value; }
diff --git a/src/array-iterator.js b/src/array-iterator.js
index defd734..8f1ab47 100644
--- a/src/array-iterator.js
+++ b/src/array-iterator.js
@@ -77,15 +77,16 @@
return CreateIteratorResultObject(void 0, true);
}
+ var elementKey = ToString(index);
iterator[arrayIteratorNextIndexSymbol] = index + 1;
if (itemKind == ARRAY_ITERATOR_KIND_VALUES)
- return CreateIteratorResultObject(array[index], false);
+ return CreateIteratorResultObject(array[elementKey], false);
if (itemKind == ARRAY_ITERATOR_KIND_ENTRIES)
- return CreateIteratorResultObject([index, array[index]], false);
+ return CreateIteratorResultObject([elementKey, array[elementKey]], false);
- return CreateIteratorResultObject(index, false);
+ return CreateIteratorResultObject(elementKey, false);
}
function ArrayEntries() {
diff --git a/src/ast.cc b/src/ast.cc
index 2077f87..e0bca67 100644
--- a/src/ast.cc
+++ b/src/ast.cc
@@ -304,6 +304,17 @@
}
+bool UnaryOperation::ResultOverwriteAllowed() {
+ switch (op_) {
+ case Token::BIT_NOT:
+ case Token::SUB:
+ return true;
+ default:
+ return false;
+ }
+}
+
+
void BinaryOperation::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
// TODO(olivf) If this Operation is used in a test context, then the right
// hand side has a ToBoolean stub and we want to collect the type information.
diff --git a/src/ast.h b/src/ast.h
index 0812472..f14156f 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -291,6 +291,7 @@
}
void Add(Handle<Map> handle, Zone* zone) {
+ ASSERT(!handle->is_deprecated());
list_.Add(handle.location(), zone);
}
@@ -1826,6 +1827,8 @@
public:
DECLARE_NODE_TYPE(UnaryOperation)
+ virtual bool ResultOverwriteAllowed();
+
Token::Value op() const { return op_; }
Expression* expression() const { return expression_; }
virtual int position() const { return pos_; }
@@ -1833,6 +1836,8 @@
BailoutId MaterializeTrueId() { return materialize_true_id_; }
BailoutId MaterializeFalseId() { return materialize_false_id_; }
+ TypeFeedbackId UnaryOperationFeedbackId() const { return reuse(id()); }
+
virtual void RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle);
protected:
diff --git a/src/atomicops_internals_tsan.h b/src/atomicops_internals_tsan.h
index b5162ba..e52c26c 100644
--- a/src/atomicops_internals_tsan.h
+++ b/src/atomicops_internals_tsan.h
@@ -32,12 +32,6 @@
#ifndef V8_ATOMICOPS_INTERNALS_TSAN_H_
#define V8_ATOMICOPS_INTERNALS_TSAN_H_
-namespace v8 {
-namespace internal {
-
-#ifndef TSAN_INTERFACE_ATOMIC_H
-#define TSAN_INTERFACE_ATOMIC_H
-
// This struct is not part of the public API of this module; clients may not
// use it. (However, it's exported via BASE_EXPORT because clients implicitly
// do use it at link time by inlining these functions.)
@@ -53,6 +47,12 @@
#define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory")
+namespace v8 {
+namespace internal {
+
+#ifndef TSAN_INTERFACE_ATOMIC_H
+#define TSAN_INTERFACE_ATOMIC_H
+
#ifdef __cplusplus
extern "C" {
#endif
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 2a385aa..c2cc6ef 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -2085,11 +2085,6 @@
"native harmony-string.js") == 0) {
if (!CompileExperimentalBuiltin(isolate(), i)) return false;
}
- if (FLAG_harmony_arrays &&
- strcmp(ExperimentalNatives::GetScriptName(i).start(),
- "native harmony-array.js") == 0) {
- if (!CompileExperimentalBuiltin(isolate(), i)) return false;
- }
}
InstallExperimentalNativeFunctions();
diff --git a/src/builtins.h b/src/builtins.h
index bb36c02..73a2e96 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -259,6 +259,8 @@
V(BIT_OR, 1) \
V(BIT_AND, 1) \
V(BIT_XOR, 1) \
+ V(UNARY_MINUS, 0) \
+ V(BIT_NOT, 0) \
V(SHL, 1) \
V(SAR, 1) \
V(SHR, 1) \
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index 852f7b5..235950d 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -41,13 +41,13 @@
DisallowHandleDereference no_deref;
ASSERT(graph != NULL);
- BailoutReason bailout_reason = kNoReason;
+ SmartArrayPointer<char> bailout_reason;
if (!graph->Optimize(&bailout_reason)) {
- FATAL(GetBailoutReason(bailout_reason));
+ FATAL(bailout_reason.is_empty() ? "unknown" : *bailout_reason);
}
LChunk* chunk = LChunk::NewChunk(graph);
if (chunk == NULL) {
- FATAL(GetBailoutReason(graph->info()->bailout_reason()));
+ FATAL(graph->info()->bailout_reason());
}
return chunk;
}
@@ -802,6 +802,44 @@
template <>
+HValue* CodeStubGraphBuilder<UnaryOpStub>::BuildCodeInitializedStub() {
+ UnaryOpStub* stub = casted_stub();
+ Handle<Type> type = stub->GetType(graph()->isolate());
+ HValue* input = GetParameter(0);
+
+ // Prevent unwanted HChange being inserted to ensure that the stub
+ // deopts on newly encountered types.
+ if (!type->Maybe(Type::Double())) {
+ input = Add<HForceRepresentation>(input, Representation::Smi());
+ }
+
+ if (!type->Is(Type::Number())) {
+ // If we expect to see other things than Numbers, we will create a generic
+ // stub, which handles all numbers and calls into the runtime for the rest.
+ IfBuilder if_number(this);
+ if_number.If<HIsNumberAndBranch>(input);
+ if_number.Then();
+ HInstruction* res = BuildUnaryMathOp(input, type, stub->operation());
+ if_number.Return(AddInstruction(res));
+ if_number.Else();
+ HValue* function = AddLoadJSBuiltin(stub->ToJSBuiltin());
+ Add<HPushArgument>(GetParameter(0));
+ HValue* result = Add<HInvokeFunction>(function, 1);
+ if_number.Return(result);
+ if_number.End();
+ return graph()->GetConstantUndefined();
+ }
+
+ return AddInstruction(BuildUnaryMathOp(input, type, stub->operation()));
+}
+
+
+Handle<Code> UnaryOpStub::GenerateCode() {
+ return DoGenerateCode(this);
+}
+
+
+template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
ToBooleanStub* stub = casted_stub();
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index d472fa2..5f6616e 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -204,6 +204,71 @@
}
+Builtins::JavaScript UnaryOpStub::ToJSBuiltin() {
+ switch (operation_) {
+ default:
+ UNREACHABLE();
+ case Token::SUB:
+ return Builtins::UNARY_MINUS;
+ case Token::BIT_NOT:
+ return Builtins::BIT_NOT;
+ }
+}
+
+
+Handle<JSFunction> UnaryOpStub::ToJSFunction(Isolate* isolate) {
+ Handle<JSBuiltinsObject> builtins(isolate->js_builtins_object());
+ Object* builtin = builtins->javascript_builtin(ToJSBuiltin());
+ return Handle<JSFunction>(JSFunction::cast(builtin), isolate);
+}
+
+
+MaybeObject* UnaryOpStub::Result(Handle<Object> object, Isolate* isolate) {
+ Handle<JSFunction> builtin_function = ToJSFunction(isolate);
+ bool caught_exception;
+ Handle<Object> result = Execution::Call(builtin_function, object,
+ 0, NULL, &caught_exception);
+ if (caught_exception) {
+ return Failure::Exception();
+ }
+ return *result;
+}
+
+
+void UnaryOpStub::UpdateStatus(Handle<Object> object) {
+ State old_state(state_);
+ if (object->IsSmi()) {
+ state_.Add(SMI);
+ if (operation_ == Token::SUB && *object == 0) {
+ // The result (-0) has to be represented as double.
+ state_.Add(HEAP_NUMBER);
+ }
+ } else if (object->IsHeapNumber()) {
+ state_.Add(HEAP_NUMBER);
+ } else {
+ state_.Add(GENERIC);
+ }
+ TraceTransition(old_state, state_);
+}
+
+
+Handle<Type> UnaryOpStub::GetType(Isolate* isolate) {
+ if (state_.Contains(GENERIC)) {
+ return handle(Type::Any(), isolate);
+ }
+ Handle<Type> type = handle(Type::None(), isolate);
+ if (state_.Contains(SMI)) {
+ type = handle(
+ Type::Union(type, handle(Type::Smi(), isolate)), isolate);
+ }
+ if (state_.Contains(HEAP_NUMBER)) {
+ type = handle(
+ Type::Union(type, handle(Type::Double(), isolate)), isolate);
+ }
+ return type;
+}
+
+
void BinaryOpStub::Generate(MacroAssembler* masm) {
// Explicitly allow generation of nested stubs. It is safe here because
// generation code does not use any raw pointers.
@@ -289,6 +354,29 @@
#undef __
+void UnaryOpStub::PrintBaseName(StringStream* stream) {
+ CodeStub::PrintBaseName(stream);
+ if (operation_ == Token::SUB) stream->Add("Minus");
+ if (operation_ == Token::BIT_NOT) stream->Add("Not");
+}
+
+
+void UnaryOpStub::PrintState(StringStream* stream) {
+ state_.Print(stream);
+}
+
+
+void UnaryOpStub::State::Print(StringStream* stream) const {
+ stream->Add("(");
+ SimpleListPrinter printer(stream);
+ if (IsEmpty()) printer.Add("None");
+ if (Contains(GENERIC)) printer.Add("Generic");
+ if (Contains(HEAP_NUMBER)) printer.Add("HeapNumber");
+ if (Contains(SMI)) printer.Add("Smi");
+ stream->Add(")");
+}
+
+
void BinaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name;
diff --git a/src/code-stubs.h b/src/code-stubs.h
index c58acd6..84d9b02 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -40,6 +40,7 @@
#define CODE_STUB_LIST_ALL_PLATFORMS(V) \
V(CallFunction) \
V(CallConstruct) \
+ V(UnaryOp) \
V(BinaryOp) \
V(StringAdd) \
V(SubString) \
@@ -592,6 +593,73 @@
};
+class UnaryOpStub : public HydrogenCodeStub {
+ public:
+ // Stub without type info available -> construct uninitialized
+ explicit UnaryOpStub(Token::Value operation)
+ : HydrogenCodeStub(UNINITIALIZED), operation_(operation) { }
+ explicit UnaryOpStub(Code::ExtraICState ic_state) :
+ state_(StateBits::decode(ic_state)),
+ operation_(OperatorBits::decode(ic_state)) { }
+
+ virtual void InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor);
+
+ virtual Code::Kind GetCodeKind() const { return Code::UNARY_OP_IC; }
+ virtual InlineCacheState GetICState() {
+ if (state_.Contains(GENERIC)) {
+ return MEGAMORPHIC;
+ } else if (state_.IsEmpty()) {
+ return PREMONOMORPHIC;
+ } else {
+ return MONOMORPHIC;
+ }
+ }
+ virtual Code::ExtraICState GetExtraICState() {
+ return OperatorBits::encode(operation_) |
+ StateBits::encode(state_.ToIntegral());
+ }
+
+ Token::Value operation() { return operation_; }
+ Handle<JSFunction> ToJSFunction(Isolate* isolate);
+ Builtins::JavaScript ToJSBuiltin();
+
+ void UpdateStatus(Handle<Object> object);
+ MaybeObject* Result(Handle<Object> object, Isolate* isolate);
+ Handle<Code> GenerateCode();
+ Handle<Type> GetType(Isolate* isolate);
+
+ protected:
+ void PrintState(StringStream* stream);
+ void PrintBaseName(StringStream* stream);
+
+ private:
+ enum UnaryOpType {
+ SMI,
+ HEAP_NUMBER,
+ GENERIC,
+ NUMBER_OF_TYPES
+ };
+
+ class State : public EnumSet<UnaryOpType, byte> {
+ public:
+ State() : EnumSet<UnaryOpType, byte>() { }
+ explicit State(byte bits) : EnumSet<UnaryOpType, byte>(bits) { }
+ void Print(StringStream* stream) const;
+ };
+
+ class StateBits : public BitField<int, 0, NUMBER_OF_TYPES> { };
+ class OperatorBits : public BitField<Token::Value, NUMBER_OF_TYPES, 8> { };
+
+ State state_;
+ Token::Value operation_;
+
+ virtual CodeStub::Major MajorKey() { return UnaryOp; }
+ virtual int NotMissMinorKey() { return GetExtraICState(); }
+};
+
+
class FastCloneShallowArrayStub : public HydrogenCodeStub {
public:
// Maximum length of copied elements array.
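GetExtraICState() above packs both the operator token and the set of observed operand types into a single integer, so the IC machinery can reconstruct the stub's configuration from a code object's extra IC state. A standalone sketch of that encoding (assumed constants, not V8's actual BitField template; NUMBER_OF_TYPES is 3 per the enum above):

    #include <stdint.h>

    static const int kNumberOfTypes = 3;  // SMI, HEAP_NUMBER, GENERIC

    // StateBits occupy bits [0, kNumberOfTypes); OperatorBits sit in the next
    // 8 bits, mirroring OperatorBits::encode(op) | StateBits::encode(state).
    inline int EncodeExtraICState(int op, uint8_t state_bits) {
      return (op << kNumberOfTypes) | state_bits;
    }
    inline uint8_t DecodeStateBits(int extra_ic_state) {
      return extra_ic_state & ((1 << kNumberOfTypes) - 1);
    }
    inline int DecodeOperator(int extra_ic_state) {
      return (extra_ic_state >> kNumberOfTypes) & 0xFF;
    }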
diff --git a/src/compiler.cc b/src/compiler.cc
index ebd4995..3c51baa 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -127,7 +127,7 @@
ASSERT(language_mode() == CLASSIC_MODE);
SetLanguageMode(shared_info_->language_mode());
}
- set_bailout_reason(kUnknown);
+ set_bailout_reason("unknown");
}
@@ -342,7 +342,7 @@
const int kMaxOptCount =
FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
if (info()->opt_count() > kMaxOptCount) {
- info()->set_bailout_reason(kOptimizedTooManyTimes);
+ info()->set_bailout_reason("optimized too many times");
return AbortOptimization();
}
@@ -356,14 +356,14 @@
const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
Scope* scope = info()->scope();
if ((scope->num_parameters() + 1) > parameter_limit) {
- info()->set_bailout_reason(kTooManyParameters);
+ info()->set_bailout_reason("too many parameters");
return AbortOptimization();
}
const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
if (!info()->osr_ast_id().IsNone() &&
scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
- info()->set_bailout_reason(kTooManyParametersLocals);
+ info()->set_bailout_reason("too many parameters/locals");
return AbortOptimization();
}
@@ -458,9 +458,9 @@
ASSERT(last_status() == SUCCEEDED);
Timer t(this, &time_taken_to_optimize_);
ASSERT(graph_ != NULL);
- BailoutReason bailout_reason = kNoReason;
+ SmartArrayPointer<char> bailout_reason;
if (!graph_->Optimize(&bailout_reason)) {
- if (bailout_reason != kNoReason) graph_builder_->Bailout(bailout_reason);
+ if (!bailout_reason.is_empty()) graph_builder_->Bailout(*bailout_reason);
return SetLastStatus(BAILED_OUT);
} else {
chunk_ = LChunk::NewChunk(graph_);
@@ -485,9 +485,7 @@
DisallowDeferredHandleDereference no_deferred_handle_deref;
Handle<Code> optimized_code = chunk_->Codegen();
if (optimized_code.is_null()) {
- if (info()->bailout_reason() == kNoReason) {
- info()->set_bailout_reason(kCodeGenerationFailed);
- }
+ info()->set_bailout_reason("code generation failed");
return AbortOptimization();
}
info()->SetCode(optimized_code);
@@ -782,7 +780,7 @@
if (!result.is_null()) {
// Explicitly disable optimization for eval code. We're not yet prepared
// to handle eval-code in the optimizing compiler.
- result->DisableOptimization(kEval);
+ result->DisableOptimization("eval");
// If caller is strict mode, the result must be in strict mode or
// extended mode as well, but not the other way around. Consider:
@@ -1057,13 +1055,13 @@
// the unoptimized code.
OptimizingCompiler::Status status = optimizing_compiler->last_status();
if (info->HasAbortedDueToDependencyChange()) {
- info->set_bailout_reason(kBailedOutDueToDependentMap);
+ info->set_bailout_reason("bailed out due to dependent map");
status = optimizing_compiler->AbortOptimization();
} else if (status != OptimizingCompiler::SUCCEEDED) {
- info->set_bailout_reason(kFailedBailedOutLastTime);
+ info->set_bailout_reason("failed/bailed out last time");
status = optimizing_compiler->AbortOptimization();
} else if (isolate->DebuggerHasBreakPoints()) {
- info->set_bailout_reason(kDebuggerIsActive);
+ info->set_bailout_reason("debugger is active");
status = optimizing_compiler->AbortOptimization();
} else {
status = optimizing_compiler->GenerateAndInstallCode();
diff --git a/src/compiler.h b/src/compiler.h
index 50053e5..d36e488 100644
--- a/src/compiler.h
+++ b/src/compiler.h
@@ -258,8 +258,8 @@
SaveHandle(&script_);
}
- BailoutReason bailout_reason() const { return bailout_reason_; }
- void set_bailout_reason(BailoutReason reason) { bailout_reason_ = reason; }
+ const char* bailout_reason() const { return bailout_reason_; }
+ void set_bailout_reason(const char* reason) { bailout_reason_ = reason; }
int prologue_offset() const {
ASSERT_NE(kPrologueOffsetNotSet, prologue_offset_);
@@ -412,7 +412,7 @@
}
}
- BailoutReason bailout_reason_;
+ const char* bailout_reason_;
int prologue_offset_;
diff --git a/src/d8.cc b/src/d8.cc
index c7b66c2..6e7beeb 100644
--- a/src/d8.cc
+++ b/src/d8.cc
@@ -1406,14 +1406,6 @@
#else
options.num_parallel_files++;
#endif // V8_SHARED
- } else if (strcmp(argv[i], "--dump-heap-constants") == 0) {
-#ifdef V8_SHARED
- printf("D8 with shared library does not support constant dumping\n");
- return false;
-#else
- options.dump_heap_constants = true;
- argv[i] = NULL;
-#endif
}
#ifdef V8_SHARED
else if (strcmp(argv[i], "--dump-counters") == 0) {
@@ -1568,63 +1560,6 @@
#endif
-#ifndef V8_SHARED
-static void DumpHeapConstants(i::Isolate* isolate) {
- i::Heap* heap = isolate->heap();
-
- // Dump the INSTANCE_TYPES table to the console.
- printf("# List of known V8 instance types.\n");
-#define DUMP_TYPE(T) printf(" %d: \"%s\",\n", i::T, #T);
- printf("INSTANCE_TYPES = {\n");
- INSTANCE_TYPE_LIST(DUMP_TYPE)
- printf("}\n");
-#undef DUMP_TYPE
-
- // Dump the KNOWN_MAP table to the console.
- printf("\n# List of known V8 maps.\n");
-#define ROOT_LIST_CASE(type, name, camel_name) \
- if (n == NULL && o == heap->name()) n = #camel_name;
-#define STRUCT_LIST_CASE(upper_name, camel_name, name) \
- if (n == NULL && o == heap->name##_map()) n = #camel_name "Map";
- i::HeapObjectIterator it(heap->map_space());
- printf("KNOWN_MAPS = {\n");
- for (i::Object* o = it.Next(); o != NULL; o = it.Next()) {
- i::Map* m = i::Map::cast(o);
- const char* n = NULL;
- intptr_t p = reinterpret_cast<intptr_t>(m) & 0xfffff;
- int t = m->instance_type();
- ROOT_LIST(ROOT_LIST_CASE)
- STRUCT_LIST(STRUCT_LIST_CASE)
- if (n == NULL) continue;
- printf(" 0x%05" V8PRIxPTR ": (%d, \"%s\"),\n", p, t, n);
- }
- printf("}\n");
-#undef STRUCT_LIST_CASE
-#undef ROOT_LIST_CASE
-
- // Dump the KNOWN_OBJECTS table to the console.
- printf("\n# List of known V8 objects.\n");
-#define ROOT_LIST_CASE(type, name, camel_name) \
- if (n == NULL && o == heap->name()) n = #camel_name;
- i::OldSpaces spit(heap);
- printf("KNOWN_OBJECTS = {\n");
- for (i::PagedSpace* s = spit.next(); s != NULL; s = spit.next()) {
- i::HeapObjectIterator it(s);
- const char* sname = AllocationSpaceName(s->identity());
- for (i::Object* o = it.Next(); o != NULL; o = it.Next()) {
- const char* n = NULL;
- intptr_t p = reinterpret_cast<intptr_t>(o) & 0xfffff;
- ROOT_LIST(ROOT_LIST_CASE)
- if (n == NULL) continue;
- printf(" (\"%s\", 0x%05" V8PRIxPTR "): \"%s\",\n", sname, p, n);
- }
- }
- printf("}\n");
-#undef ROOT_LIST_CASE
-}
-#endif // V8_SHARED
-
-
class ShellArrayBufferAllocator : public v8::ArrayBuffer::Allocator {
public:
virtual void* Allocate(size_t length) {
@@ -1668,13 +1603,6 @@
PerIsolateData data(isolate);
InitializeDebugger(isolate);
-#ifndef V8_SHARED
- if (options.dump_heap_constants) {
- DumpHeapConstants(reinterpret_cast<i::Isolate*>(isolate));
- return 0;
- }
-#endif
-
if (options.stress_opt || options.stress_deopt) {
Testing::SetStressRunType(options.stress_opt
? Testing::kStressTypeOpt
diff --git a/src/d8.h b/src/d8.h
index 3b06985..4f04342 100644
--- a/src/d8.h
+++ b/src/d8.h
@@ -231,7 +231,6 @@
stress_deopt(false),
interactive_shell(false),
test_shell(false),
- dump_heap_constants(false),
num_isolates(1),
isolate_sources(NULL) { }
@@ -255,7 +254,6 @@
bool stress_deopt;
bool interactive_shell;
bool test_shell;
- bool dump_heap_constants;
int num_isolates;
SourceGroup* isolate_sources;
};
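
The removed --dump-heap-constants handling followed d8's usual argv
convention: strcmp against the literal flag name, set the matching
ShellOptions field, and NULL out the slot so the argument is not forwarded to
V8's own flag parser. A reduced standalone sketch of that convention:

    #include <cstdio>
    #include <cstring>

    struct OptionsSketch {
      OptionsSketch() : dump_heap_constants(false) {}
      bool dump_heap_constants;
    };

    // d8-style argv scan: recognized flags set an option and NULL out the
    // argv slot so the remaining arguments fall through to V8 itself.
    static void ScanArgs(int argc, char* argv[], OptionsSketch* options) {
      for (int i = 1; i < argc; i++) {
        if (argv[i] == NULL) continue;
        if (strcmp(argv[i], "--dump-heap-constants") == 0) {
          options->dump_heap_constants = true;
          argv[i] = NULL;
        }
      }
    }

    int main(int argc, char* argv[]) {
      OptionsSketch options;
      ScanArgs(argc, argv, &options);
      printf("dump: %d\n", options.dump_heap_constants);
      return 0;
    }
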
diff --git a/src/debug.cc b/src/debug.cc
index a349502..a0b9884 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -159,6 +159,7 @@
Code* code = Code::GetCodeFromTargetAddress(target);
if ((code->is_inline_cache_stub() &&
!code->is_binary_op_stub() &&
+ !code->is_unary_op_stub() &&
!code->is_compare_ic_stub() &&
!code->is_to_boolean_ic_stub()) ||
RelocInfo::IsConstructCall(rmode())) {
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index 50d6f0b..53b9b76 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -2426,19 +2426,25 @@
Code* replacement_code) {
// Iterate over the back edge table and patch every interrupt
// call to an unconditional call to the replacement code.
+ ASSERT(unoptimized_code->kind() == Code::FUNCTION);
int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();
-
- for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
- !back_edges.Done();
- back_edges.Next()) {
- if (static_cast<int>(back_edges.loop_depth()) == loop_nesting_level) {
+ Address back_edge_cursor = unoptimized_code->instruction_start() +
+ unoptimized_code->back_edge_table_offset();
+ uint32_t table_length = Memory::uint32_at(back_edge_cursor);
+ back_edge_cursor += kIntSize;
+ for (uint32_t i = 0; i < table_length; ++i) {
+ uint32_t loop_depth = Memory::uint32_at(back_edge_cursor + 2 * kIntSize);
+ if (static_cast<int>(loop_depth) == loop_nesting_level) {
+ // This back edge has the loop depth that we want to patch.
+ uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
+ Address pc_after = unoptimized_code->instruction_start() + pc_offset;
PatchInterruptCodeAt(unoptimized_code,
- back_edges.pc(),
+ pc_after,
interrupt_code,
replacement_code);
}
+ back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
-
unoptimized_code->set_back_edges_patched_for_osr(true);
#ifdef DEBUG
Deoptimizer::VerifyInterruptCode(
@@ -2451,20 +2457,25 @@
Code* interrupt_code,
Code* replacement_code) {
// Iterate over the back edge table and revert the patched interrupt calls.
+ ASSERT(unoptimized_code->kind() == Code::FUNCTION);
ASSERT(unoptimized_code->back_edges_patched_for_osr());
int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();
-
- for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
- !back_edges.Done();
- back_edges.Next()) {
- if (static_cast<int>(back_edges.loop_depth()) <= loop_nesting_level) {
+ Address back_edge_cursor = unoptimized_code->instruction_start() +
+ unoptimized_code->back_edge_table_offset();
+ uint32_t table_length = Memory::uint32_at(back_edge_cursor);
+ back_edge_cursor += kIntSize;
+ for (uint32_t i = 0; i < table_length; ++i) {
+ uint32_t loop_depth = Memory::uint32_at(back_edge_cursor + 2 * kIntSize);
+ if (static_cast<int>(loop_depth) <= loop_nesting_level) {
+ uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
+ Address pc_after = unoptimized_code->instruction_start() + pc_offset;
RevertInterruptCodeAt(unoptimized_code,
- back_edges.pc(),
+ pc_after,
interrupt_code,
replacement_code);
}
+ back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
-
unoptimized_code->set_back_edges_patched_for_osr(false);
unoptimized_code->set_allow_osr_at_loop_nesting_level(0);
#ifdef DEBUG
@@ -2480,18 +2491,24 @@
Code* interrupt_code,
Code* replacement_code,
int loop_nesting_level) {
- for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
- !back_edges.Done();
- back_edges.Next()) {
- uint32_t loop_depth = back_edges.loop_depth();
+ CHECK(unoptimized_code->kind() == Code::FUNCTION);
+ Address back_edge_cursor = unoptimized_code->instruction_start() +
+ unoptimized_code->back_edge_table_offset();
+ uint32_t table_length = Memory::uint32_at(back_edge_cursor);
+ back_edge_cursor += kIntSize;
+ for (uint32_t i = 0; i < table_length; ++i) {
+ uint32_t loop_depth = Memory::uint32_at(back_edge_cursor + 2 * kIntSize);
CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
// Assert that all back edges for shallower loops (and only those)
// have already been patched.
+ uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
+ Address pc_after = unoptimized_code->instruction_start() + pc_offset;
CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
InterruptCodeIsPatched(unoptimized_code,
- back_edges.pc(),
+ pc_after,
interrupt_code,
replacement_code));
+ back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
}
#endif // DEBUG
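
All three reverted loops walk the back edge table with a raw byte cursor
instead of the removed BackEdgeTableIterator (see full-codegen.h below): a
uint32 entry count, followed by three-word entries laid out as {ast id, pc
offset, loop depth}, which is what kBackEdgeEntrySize = 3 * kIntSize encodes.
A standalone sketch of the same walk over a fabricated table:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    static const int kIntSize = 4;
    static const int kBackEdgeEntrySize = 3 * kIntSize;

    static uint32_t ReadU32(const uint8_t* p) {
      uint32_t v;
      memcpy(&v, p, sizeof(v));
      return v;
    }

    int main() {
      // Fabricated table: length = 2, entries of {ast_id, pc_offset, depth}.
      uint32_t raw[] = {2, /*entry 0*/ 7, 0x40, 1, /*entry 1*/ 9, 0x80, 2};
      const uint8_t* cursor = reinterpret_cast<const uint8_t*>(raw);
      uint32_t table_length = ReadU32(cursor);
      cursor += kIntSize;  // Skip the length word.
      for (uint32_t i = 0; i < table_length; ++i) {
        uint32_t pc_offset = ReadU32(cursor + kIntSize);       // word 1
        uint32_t loop_depth = ReadU32(cursor + 2 * kIntSize);  // word 2
        printf("entry %u: pc offset 0x%x, loop depth %u\n",
               i, pc_offset, loop_depth);
        cursor += kBackEdgeEntrySize;
      }
      return 0;
    }
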
diff --git a/src/extensions/i18n/date-format.cc b/src/extensions/i18n/date-format.cc
new file mode 100644
index 0000000..1058e37
--- /dev/null
+++ b/src/extensions/i18n/date-format.cc
@@ -0,0 +1,329 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "date-format.h"
+
+#include <string.h>
+
+#include "i18n-utils.h"
+#include "unicode/calendar.h"
+#include "unicode/dtfmtsym.h"
+#include "unicode/dtptngen.h"
+#include "unicode/locid.h"
+#include "unicode/numsys.h"
+#include "unicode/smpdtfmt.h"
+#include "unicode/timezone.h"
+
+namespace v8_i18n {
+
+static icu::SimpleDateFormat* InitializeDateTimeFormat(v8::Handle<v8::String>,
+ v8::Handle<v8::Object>,
+ v8::Handle<v8::Object>);
+static icu::SimpleDateFormat* CreateICUDateFormat(const icu::Locale&,
+ v8::Handle<v8::Object>);
+static void SetResolvedSettings(const icu::Locale&,
+ icu::SimpleDateFormat*,
+ v8::Handle<v8::Object>);
+
+icu::SimpleDateFormat* DateFormat::UnpackDateFormat(
+ v8::Handle<v8::Object> obj) {
+ v8::HandleScope handle_scope;
+
+ if (obj->HasOwnProperty(v8::String::New("dateFormat"))) {
+ return static_cast<icu::SimpleDateFormat*>(
+ obj->GetAlignedPointerFromInternalField(0));
+ }
+
+ return NULL;
+}
+
+void DateFormat::DeleteDateFormat(v8::Isolate* isolate,
+ v8::Persistent<v8::Object>* object,
+ void* param) {
+ // First delete the hidden C++ object.
+ // Unpacking should never return NULL here. That would only happen if
+ // this method is used as the weak callback for persistent handles not
+ // pointing to a date time formatter.
+ v8::HandleScope handle_scope(isolate);
+ v8::Local<v8::Object> handle = v8::Local<v8::Object>::New(isolate, *object);
+ delete UnpackDateFormat(handle);
+
+ // Then dispose of the persistent handle to JS object.
+ object->Dispose(isolate);
+}
+
+void DateFormat::JSInternalFormat(
+ const v8::FunctionCallbackInfo<v8::Value>& args) {
+ double millis = 0.0;
+ if (args.Length() != 2 || !args[0]->IsObject() || !args[1]->IsDate()) {
+ v8::ThrowException(v8::Exception::Error(
+ v8::String::New(
+ "Internal error. Formatter and date value have to be specified.")));
+ return;
+ } else {
+ millis = v8::Date::Cast(*args[1])->NumberValue();
+ }
+
+ icu::SimpleDateFormat* date_format = UnpackDateFormat(args[0]->ToObject());
+ if (!date_format) {
+ v8::ThrowException(v8::Exception::Error(
+ v8::String::New("DateTimeFormat method called on an object "
+ "that is not a DateTimeFormat.")));
+ return;
+ }
+
+ icu::UnicodeString result;
+ date_format->format(millis, result);
+
+ args.GetReturnValue().Set(v8::String::New(
+ reinterpret_cast<const uint16_t*>(result.getBuffer()), result.length()));
+}
+
+void DateFormat::JSInternalParse(
+ const v8::FunctionCallbackInfo<v8::Value>& args) {
+ icu::UnicodeString string_date;
+ if (args.Length() != 2 || !args[0]->IsObject() || !args[1]->IsString()) {
+ v8::ThrowException(v8::Exception::Error(
+ v8::String::New(
+ "Internal error. Formatter and string have to be specified.")));
+ return;
+ } else {
+ if (!Utils::V8StringToUnicodeString(args[1], &string_date)) {
+ string_date = "";
+ }
+ }
+
+ icu::SimpleDateFormat* date_format = UnpackDateFormat(args[0]->ToObject());
+ if (!date_format) {
+ v8::ThrowException(v8::Exception::Error(
+ v8::String::New("DateTimeFormat method called on an object "
+ "that is not a DateTimeFormat.")));
+ return;
+ }
+
+ UErrorCode status = U_ZERO_ERROR;
+ UDate date = date_format->parse(string_date, status);
+ if (U_FAILURE(status)) {
+ return;
+ }
+
+ args.GetReturnValue().Set(v8::Date::New(static_cast<double>(date)));
+}
+
+void DateFormat::JSCreateDateTimeFormat(
+ const v8::FunctionCallbackInfo<v8::Value>& args) {
+ if (args.Length() != 3 ||
+ !args[0]->IsString() ||
+ !args[1]->IsObject() ||
+ !args[2]->IsObject()) {
+ v8::ThrowException(v8::Exception::Error(
+ v8::String::New("Internal error, wrong parameters.")));
+ return;
+ }
+
+ v8::Isolate* isolate = args.GetIsolate();
+ v8::Local<v8::ObjectTemplate> date_format_template =
+ Utils::GetTemplate(isolate);
+
+ // Create an empty object wrapper.
+ v8::Local<v8::Object> local_object = date_format_template->NewInstance();
+ // But the handle shouldn't be empty.
+ // That can happen if there was a stack overflow when creating the object.
+ if (local_object.IsEmpty()) {
+ args.GetReturnValue().Set(local_object);
+ return;
+ }
+
+ // Set date time formatter as internal field of the resulting JS object.
+ icu::SimpleDateFormat* date_format = InitializeDateTimeFormat(
+ args[0]->ToString(), args[1]->ToObject(), args[2]->ToObject());
+
+ if (!date_format) {
+ v8::ThrowException(v8::Exception::Error(v8::String::New(
+ "Internal error. Couldn't create ICU date time formatter.")));
+ return;
+ } else {
+ local_object->SetAlignedPointerInInternalField(0, date_format);
+
+ v8::TryCatch try_catch;
+ local_object->Set(v8::String::New("dateFormat"), v8::String::New("valid"));
+ if (try_catch.HasCaught()) {
+ v8::ThrowException(v8::Exception::Error(
+ v8::String::New("Internal error, couldn't set property.")));
+ return;
+ }
+ }
+
+ v8::Persistent<v8::Object> wrapper(isolate, local_object);
+ // Make the object handle weak so we can delete the formatter once GC
+ // kicks in.
+ wrapper.MakeWeak<void>(NULL, &DeleteDateFormat);
+ args.GetReturnValue().Set(wrapper);
+ wrapper.ClearAndLeak();
+}
+
+static icu::SimpleDateFormat* InitializeDateTimeFormat(
+ v8::Handle<v8::String> locale,
+ v8::Handle<v8::Object> options,
+ v8::Handle<v8::Object> resolved) {
+ // Convert BCP47 into ICU locale format.
+ UErrorCode status = U_ZERO_ERROR;
+ icu::Locale icu_locale;
+ char icu_result[ULOC_FULLNAME_CAPACITY];
+ int icu_length = 0;
+ v8::String::AsciiValue bcp47_locale(locale);
+ if (bcp47_locale.length() != 0) {
+ uloc_forLanguageTag(*bcp47_locale, icu_result, ULOC_FULLNAME_CAPACITY,
+ &icu_length, &status);
+ if (U_FAILURE(status) || icu_length == 0) {
+ return NULL;
+ }
+ icu_locale = icu::Locale(icu_result);
+ }
+
+ icu::SimpleDateFormat* date_format = CreateICUDateFormat(icu_locale, options);
+ if (!date_format) {
+ // Remove extensions and try again.
+ icu::Locale no_extension_locale(icu_locale.getBaseName());
+ date_format = CreateICUDateFormat(no_extension_locale, options);
+
+ // Set resolved settings (pattern, numbering system, calendar).
+ SetResolvedSettings(no_extension_locale, date_format, resolved);
+ } else {
+ SetResolvedSettings(icu_locale, date_format, resolved);
+ }
+
+ return date_format;
+}
+
+static icu::SimpleDateFormat* CreateICUDateFormat(
+ const icu::Locale& icu_locale, v8::Handle<v8::Object> options) {
+ // Create the time zone as specified by the user. We have to re-create
+ // the time zone since the calendar takes ownership of it.
+ icu::TimeZone* tz = NULL;
+ icu::UnicodeString timezone;
+ if (Utils::ExtractStringSetting(options, "timeZone", &timezone)) {
+ tz = icu::TimeZone::createTimeZone(timezone);
+ } else {
+ tz = icu::TimeZone::createDefault();
+ }
+
+ // Create a calendar using locale, and apply time zone to it.
+ UErrorCode status = U_ZERO_ERROR;
+ icu::Calendar* calendar =
+ icu::Calendar::createInstance(tz, icu_locale, status);
+
+ // Make formatter from skeleton. Calendar and numbering system are added
+ // to the locale as Unicode extension (if they were specified at all).
+ icu::SimpleDateFormat* date_format = NULL;
+ icu::UnicodeString skeleton;
+ if (Utils::ExtractStringSetting(options, "skeleton", &skeleton)) {
+ icu::DateTimePatternGenerator* generator =
+ icu::DateTimePatternGenerator::createInstance(icu_locale, status);
+ icu::UnicodeString pattern;
+ if (U_SUCCESS(status)) {
+ pattern = generator->getBestPattern(skeleton, status);
+ delete generator;
+ }
+
+ date_format = new icu::SimpleDateFormat(pattern, icu_locale, status);
+ if (U_SUCCESS(status)) {
+ date_format->adoptCalendar(calendar);
+ }
+ }
+
+ if (U_FAILURE(status)) {
+ delete calendar;
+ delete date_format;
+ date_format = NULL;
+ }
+
+ return date_format;
+}
+
+static void SetResolvedSettings(const icu::Locale& icu_locale,
+ icu::SimpleDateFormat* date_format,
+ v8::Handle<v8::Object> resolved) {
+ UErrorCode status = U_ZERO_ERROR;
+
+ // Set pattern, time zone and calendar. Guard the dereference: the
+ // caller may hand us NULL when formatter creation failed.
+ if (date_format) {
+ icu::UnicodeString pattern;
+ date_format->toPattern(pattern);
+ resolved->Set(v8::String::New("pattern"),
+ v8::String::New(reinterpret_cast<const uint16_t*>(
+ pattern.getBuffer()), pattern.length()));
+
+ const icu::Calendar* calendar = date_format->getCalendar();
+ const char* calendar_name = calendar->getType();
+ resolved->Set(v8::String::New("calendar"), v8::String::New(calendar_name));
+
+ const icu::TimeZone& tz = calendar->getTimeZone();
+ icu::UnicodeString time_zone;
+ tz.getID(time_zone);
+
+ icu::UnicodeString canonical_time_zone;
+ icu::TimeZone::getCanonicalID(time_zone, canonical_time_zone, status);
+ if (U_SUCCESS(status)) {
+ if (canonical_time_zone == UNICODE_STRING_SIMPLE("Etc/GMT")) {
+ resolved->Set(v8::String::New("timeZone"), v8::String::New("UTC"));
+ } else {
+ resolved->Set(v8::String::New("timeZone"),
+ v8::String::New(reinterpret_cast<const uint16_t*>(
+ canonical_time_zone.getBuffer()),
+ canonical_time_zone.length()));
+ }
+ }
+ }
+
+ // Ugly hack. ICU doesn't expose the numbering system in any way, so we
+ // have to assume that for a given locale the NumberingSystem constructor
+ // produces the same digits as NumberFormat/Calendar would.
+ status = U_ZERO_ERROR;
+ icu::NumberingSystem* numbering_system =
+ icu::NumberingSystem::createInstance(icu_locale, status);
+ if (U_SUCCESS(status)) {
+ const char* ns = numbering_system->getName();
+ resolved->Set(v8::String::New("numberingSystem"), v8::String::New(ns));
+ } else {
+ resolved->Set(v8::String::New("numberingSystem"), v8::Undefined());
+ }
+ delete numbering_system;
+
+ // Set the locale
+ char result[ULOC_FULLNAME_CAPACITY];
+ status = U_ZERO_ERROR;
+ uloc_toLanguageTag(
+ icu_locale.getName(), result, ULOC_FULLNAME_CAPACITY, FALSE, &status);
+ if (U_SUCCESS(status)) {
+ resolved->Set(v8::String::New("locale"), v8::String::New(result));
+ } else {
+ // This should never happen, since we got the locale from ICU.
+ resolved->Set(v8::String::New("locale"), v8::String::New("und"));
+ }
+}
+
+} // namespace v8_i18n
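
JSCreateDateTimeFormat above uses the wrap-and-weaken pattern: stash the C++
pointer in an internal field, tag the object so UnpackDateFormat can recognize
it, then hand lifetime management to the GC through a weak persistent handle.
A hedged sketch of the same pattern against this 2013-era V8 API, with a
hypothetical wrapped type Thing (needs v8.h; not compilable standalone):

    #include "v8.h"

    struct Thing {};  // Hypothetical C++ object owned by the JS wrapper.

    static void DeleteThing(v8::Isolate* isolate,
                            v8::Persistent<v8::Object>* object,
                            void* param) {
      v8::HandleScope handle_scope(isolate);
      v8::Local<v8::Object> handle =
          v8::Local<v8::Object>::New(isolate, *object);
      // Delete the hidden C++ object, then the persistent handle itself.
      delete static_cast<Thing*>(
          handle->GetAlignedPointerFromInternalField(0));
      object->Dispose(isolate);
    }

    static void WrapThing(const v8::FunctionCallbackInfo<v8::Value>& args,
                          v8::Local<v8::ObjectTemplate> templ,
                          Thing* thing) {
      v8::Local<v8::Object> obj = templ->NewInstance();
      if (obj.IsEmpty()) return;  // e.g. stack overflow during allocation
      obj->SetAlignedPointerInInternalField(0, thing);
      v8::Persistent<v8::Object> wrapper(args.GetIsolate(), obj);
      // GC may now reclaim the wrapper; DeleteThing frees the C++ side.
      wrapper.MakeWeak<void>(NULL, &DeleteThing);
      args.GetReturnValue().Set(wrapper);
      wrapper.ClearAndLeak();  // Keep the handle alive past this scope.
    }
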
diff --git a/src/i18n.h b/src/extensions/i18n/date-format.h
similarity index 68%
rename from src/i18n.h
rename to src/extensions/i18n/date-format.h
index 37c57b1..daa5964 100644
--- a/src/i18n.h
+++ b/src/extensions/i18n/date-format.h
@@ -26,8 +26,8 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifndef V8_I18N_H_
-#define V8_I18N_H_
+#ifndef V8_EXTENSIONS_I18N_DATE_FORMAT_H_
+#define V8_EXTENSIONS_I18N_DATE_FORMAT_H_
#include "unicode/uversion.h"
#include "v8.h"
@@ -36,44 +36,36 @@
class SimpleDateFormat;
}
-namespace v8 {
-namespace internal {
-
-class I18N {
- public:
- // Creates an ObjectTemplate with one internal field.
- static Handle<ObjectTemplateInfo> GetTemplate(Isolate* isolate);
-
- // Creates an ObjectTemplate with two internal fields.
- static Handle<ObjectTemplateInfo> GetTemplate2(Isolate* isolate);
-
- private:
- I18N();
-};
+namespace v8_i18n {
class DateFormat {
public:
- // Create a formatter for the specified locale and options. Returns the
- // resolved settings for the locale / options.
- static icu::SimpleDateFormat* InitializeDateTimeFormat(
- Isolate* isolate,
- Handle<String> locale,
- Handle<JSObject> options,
- Handle<JSObject> resolved);
+ static void JSCreateDateTimeFormat(
+ const v8::FunctionCallbackInfo<v8::Value>& args);
+
+ // Helper methods for various bindings.
// Unpacks date format object from corresponding JavaScript object.
- static icu::SimpleDateFormat* UnpackDateFormat(Isolate* isolate,
- Handle<JSObject> obj);
+ static icu::SimpleDateFormat* UnpackDateFormat(
+ v8::Handle<v8::Object> obj);
// Release memory we allocated for the DateFormat once the JS object that
// holds the pointer gets garbage collected.
static void DeleteDateFormat(v8::Isolate* isolate,
- Persistent<v8::Object>* object,
+ v8::Persistent<v8::Object>* object,
void* param);
+
+ // Formats date and returns corresponding string.
+ static void JSInternalFormat(const v8::FunctionCallbackInfo<v8::Value>& args);
+
+ // Parses date and returns corresponding Date object or undefined if parse
+ // failed.
+ static void JSInternalParse(const v8::FunctionCallbackInfo<v8::Value>& args);
+
private:
DateFormat();
};
-} } // namespace v8::internal
+} // namespace v8_i18n
-#endif // V8_I18N_H_
+#endif // V8_EXTENSIONS_I18N_DATE_FORMAT_H_
diff --git a/src/extensions/i18n/date-format.js b/src/extensions/i18n/date-format.js
index b1d28e5..04e7a7c 100644
--- a/src/extensions/i18n/date-format.js
+++ b/src/extensions/i18n/date-format.js
@@ -235,6 +235,7 @@
* Useful for subclassing.
*/
function initializeDateTimeFormat(dateFormat, locales, options) {
+ native function NativeJSCreateDateTimeFormat();
if (dateFormat.hasOwnProperty('__initializedIntlObject')) {
throw new TypeError('Trying to re-initialize DateTimeFormat object.');
@@ -291,7 +292,7 @@
year: {writable: true}
});
- var formatter = %CreateDateTimeFormat(
+ var formatter = NativeJSCreateDateTimeFormat(
requestedLocale, {skeleton: ldmlString, timeZone: tz}, resolved);
if (tz !== undefined && tz !== resolved.timeZone) {
@@ -408,6 +409,8 @@
* DateTimeFormat.
*/
function formatDate(formatter, dateValue) {
+ native function NativeJSInternalDateFormat();
+
var dateMs;
if (dateValue === undefined) {
dateMs = Date.now();
@@ -419,7 +422,7 @@
throw new RangeError('Provided date is not in valid range.');
}
- return %InternalDateFormat(formatter.formatter, new Date(dateMs));
+ return NativeJSInternalDateFormat(formatter.formatter, new Date(dateMs));
}
@@ -430,7 +433,8 @@
* Returns undefined if date string cannot be parsed.
*/
function parseDate(formatter, value) {
- return %InternalDateParse(formatter.formatter, String(value));
+ native function NativeJSInternalDateParse();
+ return NativeJSInternalDateParse(formatter.formatter, String(value));
}
diff --git a/src/extensions/i18n/i18n-extension.cc b/src/extensions/i18n/i18n-extension.cc
index b110b7d..1c77b88 100644
--- a/src/extensions/i18n/i18n-extension.cc
+++ b/src/extensions/i18n/i18n-extension.cc
@@ -30,6 +30,8 @@
#include "break-iterator.h"
#include "collator.h"
+#include "date-format.h"
+#include "locale.h"
#include "natives.h"
#include "number-format.h"
@@ -47,6 +49,26 @@
v8::Handle<v8::FunctionTemplate> Extension::GetNativeFunction(
v8::Handle<v8::String> name) {
+ // Standalone, helper methods.
+ if (name->Equals(v8::String::New("NativeJSCanonicalizeLanguageTag"))) {
+ return v8::FunctionTemplate::New(JSCanonicalizeLanguageTag);
+ } else if (name->Equals(v8::String::New("NativeJSAvailableLocalesOf"))) {
+ return v8::FunctionTemplate::New(JSAvailableLocalesOf);
+ } else if (name->Equals(v8::String::New("NativeJSGetDefaultICULocale"))) {
+ return v8::FunctionTemplate::New(JSGetDefaultICULocale);
+ } else if (name->Equals(v8::String::New("NativeJSGetLanguageTagVariants"))) {
+ return v8::FunctionTemplate::New(JSGetLanguageTagVariants);
+ }
+
+ // Date format and parse.
+ if (name->Equals(v8::String::New("NativeJSCreateDateTimeFormat"))) {
+ return v8::FunctionTemplate::New(DateFormat::JSCreateDateTimeFormat);
+ } else if (name->Equals(v8::String::New("NativeJSInternalDateFormat"))) {
+ return v8::FunctionTemplate::New(DateFormat::JSInternalFormat);
+ } else if (name->Equals(v8::String::New("NativeJSInternalDateParse"))) {
+ return v8::FunctionTemplate::New(DateFormat::JSInternalParse);
+ }
+
// Number format and parse.
if (name->Equals(v8::String::New("NativeJSCreateNumberFormat"))) {
return v8::FunctionTemplate::New(NumberFormat::JSCreateNumberFormat);
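
GetNativeFunction is the single dispatch point for the
`native function NativeJS*();` declarations in the extension's JS sources:
each recognized name yields a FunctionTemplate bound to the matching C++
callback. A hedged sketch of one more binding (NativeJSFrobnicate and
JSFrobnicate are hypothetical names):

    #include "v8.h"

    // Hypothetical callback with the standard signature used in these files.
    static void JSFrobnicate(const v8::FunctionCallbackInfo<v8::Value>& args);

    v8::Handle<v8::FunctionTemplate> GetNativeFunctionSketch(
        v8::Handle<v8::String> name) {
      if (name->Equals(v8::String::New("NativeJSFrobnicate"))) {
        return v8::FunctionTemplate::New(JSFrobnicate);
      }
      // An empty handle means the name is not provided by this extension.
      return v8::Handle<v8::FunctionTemplate>();
    }
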
diff --git a/src/extensions/i18n/i18n-utils.cc b/src/extensions/i18n/i18n-utils.cc
index 8c87f07..eac1166 100644
--- a/src/extensions/i18n/i18n-utils.cc
+++ b/src/extensions/i18n/i18n-utils.cc
@@ -141,37 +141,35 @@
}
-static v8::Local<v8::ObjectTemplate> ToLocal(i::Handle<i::Object> handle) {
- return v8::Utils::ToLocal(i::Handle<i::ObjectTemplateInfo>::cast(handle));
-}
-
-
-template<int internal_fields, i::EternalHandles::SingletonHandle field>
-static v8::Local<v8::ObjectTemplate> GetEternal(v8::Isolate* external) {
- i::Isolate* isolate = reinterpret_cast<i::Isolate*>(external);
- if (isolate->eternal_handles()->Exists(field)) {
- return ToLocal(isolate->eternal_handles()->GetSingleton(field));
- }
- v8::Local<v8::ObjectTemplate> raw_template(v8::ObjectTemplate::New());
- raw_template->SetInternalFieldCount(internal_fields);
- return ToLocal(
- isolate->eternal_handles()->CreateSingleton(
- isolate,
- *v8::Utils::OpenHandle(*raw_template),
- field));
-}
-
-
// static
v8::Local<v8::ObjectTemplate> Utils::GetTemplate(v8::Isolate* isolate) {
- return GetEternal<1, i::EternalHandles::I18N_TEMPLATE_ONE>(isolate);
+ i::Isolate* internal = reinterpret_cast<i::Isolate*>(isolate);
+ if (internal->heap()->i18n_template_one() ==
+ internal->heap()->the_hole_value()) {
+ v8::Local<v8::ObjectTemplate> raw_template(v8::ObjectTemplate::New());
+ raw_template->SetInternalFieldCount(1);
+ internal->heap()
+ ->SetI18nTemplateOne(*v8::Utils::OpenHandle(*raw_template));
+ }
+
+ return v8::Utils::ToLocal(i::Handle<i::ObjectTemplateInfo>::cast(
+ internal->factory()->i18n_template_one()));
}
// static
v8::Local<v8::ObjectTemplate> Utils::GetTemplate2(v8::Isolate* isolate) {
- return GetEternal<2, i::EternalHandles::I18N_TEMPLATE_TWO>(isolate);
-}
+ i::Isolate* internal = reinterpret_cast<i::Isolate*>(isolate);
+ if (internal->heap()->i18n_template_two() ==
+ internal->heap()->the_hole_value()) {
+ v8::Local<v8::ObjectTemplate> raw_template(v8::ObjectTemplate::New());
+ raw_template->SetInternalFieldCount(2);
+ internal->heap()
+ ->SetI18nTemplateTwo(*v8::Utils::OpenHandle(*raw_template));
+ }
+ return v8::Utils::ToLocal(i::Handle<i::ObjectTemplateInfo>::cast(
+ internal->factory()->i18n_template_two()));
+}
} // namespace v8_i18n
diff --git a/src/extensions/i18n/i18n-utils.js b/src/extensions/i18n/i18n-utils.js
index 545082e..d7e9486 100644
--- a/src/extensions/i18n/i18n-utils.js
+++ b/src/extensions/i18n/i18n-utils.js
@@ -255,6 +255,8 @@
* lookup algorithm.
*/
function lookupMatcher(service, requestedLocales) {
+ native function NativeJSGetDefaultICULocale();
+
if (service.match(SERVICE_RE) === null) {
throw new Error('Internal error, wrong service type: ' + service);
}
@@ -285,7 +287,7 @@
// Didn't find a match, return default.
if (DEFAULT_ICU_LOCALE === undefined) {
- DEFAULT_ICU_LOCALE = %GetDefaultICULocale();
+ DEFAULT_ICU_LOCALE = NativeJSGetDefaultICULocale();
}
return {'locale': DEFAULT_ICU_LOCALE, 'extension': '', 'position': -1};
@@ -444,12 +446,14 @@
// Returns Array<Object>, where each object has maximized and base properties.
// Maximized: zh -> zh-Hans-CN
// Base: zh-CN-u-ca-gregory -> zh-CN
+ native function NativeJSGetLanguageTagVariants();
+
// Take care of grandfathered or simple cases.
if (original === resolved) {
return original;
}
- var locales = %GetLanguageTagVariants([original, resolved]);
+ var locales = NativeJSGetLanguageTagVariants([original, resolved]);
if (locales[0].maximized !== locales[1].maximized) {
return resolved;
}
@@ -467,7 +471,8 @@
* that is supported. This is required by the spec.
*/
function getAvailableLocalesOf(service) {
- var available = %AvailableLocalesOf(service);
+ native function NativeJSAvailableLocalesOf();
+ var available = NativeJSAvailableLocalesOf(service);
for (var i in available) {
if (available.hasOwnProperty(i)) {
diff --git a/src/extensions/i18n/locale.cc b/src/extensions/i18n/locale.cc
new file mode 100644
index 0000000..6b6f9ac
--- /dev/null
+++ b/src/extensions/i18n/locale.cc
@@ -0,0 +1,251 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "locale.h"
+
+#include <string.h>
+
+#include "unicode/brkiter.h"
+#include "unicode/coll.h"
+#include "unicode/datefmt.h"
+#include "unicode/numfmt.h"
+#include "unicode/uloc.h"
+#include "unicode/uversion.h"
+
+namespace v8_i18n {
+
+void JSCanonicalizeLanguageTag(
+ const v8::FunctionCallbackInfo<v8::Value>& args) {
+ // Expect a locale id, which must be a string.
+ if (args.Length() != 1 || !args[0]->IsString()) {
+ v8::ThrowException(v8::Exception::SyntaxError(
+ v8::String::New("Locale identifier, as a string, is required.")));
+ return;
+ }
+
+ UErrorCode error = U_ZERO_ERROR;
+
+ char icu_result[ULOC_FULLNAME_CAPACITY];
+ int icu_length = 0;
+
+ // Return value that denotes an invalid language tag.
+ const char* const kInvalidTag = "invalid-tag";
+
+ v8::String::AsciiValue locale_id(args[0]->ToString());
+ if (*locale_id == NULL) {
+ args.GetReturnValue().Set(v8::String::New(kInvalidTag));
+ return;
+ }
+
+ uloc_forLanguageTag(*locale_id, icu_result, ULOC_FULLNAME_CAPACITY,
+ &icu_length, &error);
+ if (U_FAILURE(error) || icu_length == 0) {
+ args.GetReturnValue().Set(v8::String::New(kInvalidTag));
+ return;
+ }
+
+ char result[ULOC_FULLNAME_CAPACITY];
+
+ // Force strict BCP47 rules.
+ uloc_toLanguageTag(icu_result, result, ULOC_FULLNAME_CAPACITY, TRUE, &error);
+
+ if (U_FAILURE(error)) {
+ args.GetReturnValue().Set(v8::String::New(kInvalidTag));
+ return;
+ }
+
+ args.GetReturnValue().Set(v8::String::New(result));
+}
+
+
+void JSAvailableLocalesOf(const v8::FunctionCallbackInfo<v8::Value>& args) {
+ // Expect a service name, which must be a string.
+ if (args.Length() != 1 || !args[0]->IsString()) {
+ v8::ThrowException(v8::Exception::SyntaxError(
+ v8::String::New("Service identifier, as a string, is required.")));
+ return;
+ }
+
+ const icu::Locale* available_locales = NULL;
+
+ int32_t count = 0;
+ v8::String::AsciiValue service(args[0]->ToString());
+ if (strcmp(*service, "collator") == 0) {
+ available_locales = icu::Collator::getAvailableLocales(count);
+ } else if (strcmp(*service, "numberformat") == 0) {
+ available_locales = icu::NumberFormat::getAvailableLocales(count);
+ } else if (strcmp(*service, "dateformat") == 0) {
+ available_locales = icu::DateFormat::getAvailableLocales(count);
+ } else if (strcmp(*service, "breakiterator") == 0) {
+ available_locales = icu::BreakIterator::getAvailableLocales(count);
+ }
+
+ v8::TryCatch try_catch;
+ UErrorCode error = U_ZERO_ERROR;
+ char result[ULOC_FULLNAME_CAPACITY];
+ v8::Handle<v8::Object> locales = v8::Object::New();
+
+ for (int32_t i = 0; i < count; ++i) {
+ const char* icu_name = available_locales[i].getName();
+
+ error = U_ZERO_ERROR;
+ // No need to force strict BCP47 rules.
+ uloc_toLanguageTag(icu_name, result, ULOC_FULLNAME_CAPACITY, FALSE, &error);
+ if (U_FAILURE(error)) {
+ // This shouldn't happen, but let's not break the user.
+ continue;
+ }
+
+ // The index is just a dummy property value.
+ locales->Set(v8::String::New(result), v8::Integer::New(i));
+ if (try_catch.HasCaught()) {
+ // Ignore error, but stop processing and return.
+ break;
+ }
+ }
+
+ args.GetReturnValue().Set(locales);
+}
+
+
+void JSGetDefaultICULocale(const v8::FunctionCallbackInfo<v8::Value>& args) {
+ icu::Locale default_locale;
+
+ // Convert the default locale to a BCP47 language tag.
+ char result[ULOC_FULLNAME_CAPACITY];
+ UErrorCode status = U_ZERO_ERROR;
+ uloc_toLanguageTag(
+ default_locale.getName(), result, ULOC_FULLNAME_CAPACITY, FALSE, &status);
+ if (U_SUCCESS(status)) {
+ args.GetReturnValue().Set(v8::String::New(result));
+ return;
+ }
+
+ args.GetReturnValue().Set(v8::String::New("und"));
+}
+
+
+void JSGetLanguageTagVariants(const v8::FunctionCallbackInfo<v8::Value>& args) {
+ v8::TryCatch try_catch;
+
+ // Expect an array of strings.
+ if (args.Length() != 1 || !args[0]->IsArray()) {
+ v8::ThrowException(v8::Exception::SyntaxError(
+ v8::String::New("Internal error. Expected Array<String>.")));
+ return;
+ }
+
+ v8::Local<v8::Array> input = v8::Local<v8::Array>::Cast(args[0]);
+ v8::Handle<v8::Array> output = v8::Array::New(input->Length());
+ for (unsigned int i = 0; i < input->Length(); ++i) {
+ v8::Local<v8::Value> locale_id = input->Get(i);
+ if (try_catch.HasCaught()) {
+ break;
+ }
+
+ if (!locale_id->IsString()) {
+ v8::ThrowException(v8::Exception::SyntaxError(
+ v8::String::New("Internal error. Array element is missing "
+ "or it isn't a string.")));
+ return;
+ }
+
+ v8::String::AsciiValue ascii_locale_id(locale_id);
+ if (*ascii_locale_id == NULL) {
+ v8::ThrowException(v8::Exception::SyntaxError(
+ v8::String::New("Internal error. Non-ASCII locale identifier.")));
+ return;
+ }
+
+ UErrorCode error = U_ZERO_ERROR;
+
+ // Convert from BCP47 to ICU format.
+ // de-DE-u-co-phonebk -> de_DE@collation=phonebook
+ char icu_locale[ULOC_FULLNAME_CAPACITY];
+ int icu_locale_length = 0;
+ uloc_forLanguageTag(*ascii_locale_id, icu_locale, ULOC_FULLNAME_CAPACITY,
+ &icu_locale_length, &error);
+ if (U_FAILURE(error) || icu_locale_length == 0) {
+ v8::ThrowException(v8::Exception::SyntaxError(
+ v8::String::New("Internal error. Failed to convert locale to ICU.")));
+ return;
+ }
+
+ // Maximize the locale.
+ // de_DE@collation=phonebook -> de_Latn_DE@collation=phonebook
+ char icu_max_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_addLikelySubtags(
+ icu_locale, icu_max_locale, ULOC_FULLNAME_CAPACITY, &error);
+
+ // Remove extensions from maximized locale.
+ // de_Latn_DE@collation=phonebook -> de_Latn_DE
+ char icu_base_max_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_getBaseName(
+ icu_max_locale, icu_base_max_locale, ULOC_FULLNAME_CAPACITY, &error);
+
+ // Get original name without extensions.
+ // de_DE@collation=phonebook -> de_DE
+ char icu_base_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_getBaseName(
+ icu_locale, icu_base_locale, ULOC_FULLNAME_CAPACITY, &error);
+
+ // Convert from ICU locale format to BCP47 format.
+ // de_Latn_DE -> de-Latn-DE
+ char base_max_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_toLanguageTag(icu_base_max_locale, base_max_locale,
+ ULOC_FULLNAME_CAPACITY, FALSE, &error);
+
+ // de_DE -> de-DE
+ char base_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_toLanguageTag(
+ icu_base_locale, base_locale, ULOC_FULLNAME_CAPACITY, FALSE, &error);
+
+ if (U_FAILURE(error)) {
+ v8::ThrowException(v8::Exception::SyntaxError(
+ v8::String::New("Internal error. Couldn't generate maximized "
+ "or base locale.")));
+ return;
+ }
+
+ v8::Handle<v8::Object> result = v8::Object::New();
+ result->Set(v8::String::New("maximized"), v8::String::New(base_max_locale));
+ result->Set(v8::String::New("base"), v8::String::New(base_locale));
+ if (try_catch.HasCaught()) {
+ break;
+ }
+
+ output->Set(i, result);
+ if (try_catch.HasCaught()) {
+ break;
+ }
+ }
+
+ args.GetReturnValue().Set(output);
+}
+
+} // namespace v8_i18n
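
JSGetLanguageTagVariants runs each input tag through a four-step uloc_*
pipeline, checking U_FAILURE only once at the end exactly as above. A
standalone sketch of the pipeline for one tag (requires ICU headers and
library):

    #include <unicode/uloc.h>
    #include <cstdio>

    int main() {
      UErrorCode error = U_ZERO_ERROR;
      char icu_locale[ULOC_FULLNAME_CAPACITY];    // de_DE@collation=phonebook
      char icu_max[ULOC_FULLNAME_CAPACITY];       // de_Latn_DE@collation=...
      char icu_base_max[ULOC_FULLNAME_CAPACITY];  // de_Latn_DE
      char base_max[ULOC_FULLNAME_CAPACITY];      // de-Latn-DE
      int32_t parsed_length = 0;

      // BCP47 -> ICU, maximize, strip extensions, back to BCP47.
      uloc_forLanguageTag("de-DE-u-co-phonebk", icu_locale,
                          ULOC_FULLNAME_CAPACITY, &parsed_length, &error);
      uloc_addLikelySubtags(icu_locale, icu_max,
                            ULOC_FULLNAME_CAPACITY, &error);
      uloc_getBaseName(icu_max, icu_base_max, ULOC_FULLNAME_CAPACITY, &error);
      uloc_toLanguageTag(icu_base_max, base_max, ULOC_FULLNAME_CAPACITY,
                         FALSE, &error);

      if (U_SUCCESS(error)) printf("maximized base: %s\n", base_max);
      return 0;
    }
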
diff --git a/src/extensions/i18n/locale.h b/src/extensions/i18n/locale.h
new file mode 100644
index 0000000..c39568e
--- /dev/null
+++ b/src/extensions/i18n/locale.h
@@ -0,0 +1,56 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_EXTENSIONS_I18N_SRC_LOCALE_H_
+#define V8_EXTENSIONS_I18N_SRC_LOCALE_H_
+
+#include "unicode/uversion.h"
+#include "v8.h"
+
+namespace v8_i18n {
+
+// Canonicalizes the BCP47 language tag using BCP47 rules.
+// Returns 'invalid-tag' in case input was not well formed.
+void JSCanonicalizeLanguageTag(const v8::FunctionCallbackInfo<v8::Value>& args);
+
+// Returns a list of available locales for collator, date/number formatter,
+// or break iterator.
+void JSAvailableLocalesOf(const v8::FunctionCallbackInfo<v8::Value>& args);
+
+// Returns default ICU locale.
+void JSGetDefaultICULocale(const v8::FunctionCallbackInfo<v8::Value>& args);
+
+// Returns an array of objects that hold the maximized and base names of the
+// inputs. Unicode extensions are dropped from both.
+// Input: ['zh-TW-u-nu-thai', 'sr']
+// Output: [{maximized: 'zh-Hant-TW', base: 'zh-TW'},
+// {maximized: 'sr-Cyrl-RS', base: 'sr'}]
+void JSGetLanguageTagVariants(const v8::FunctionCallbackInfo<v8::Value>& args);
+
+} // namespace v8_i18n
+
+#endif  // V8_EXTENSIONS_I18N_SRC_LOCALE_H_
diff --git a/src/extensions/i18n/locale.js b/src/extensions/i18n/locale.js
index e478327..ea95b87 100644
--- a/src/extensions/i18n/locale.js
+++ b/src/extensions/i18n/locale.js
@@ -34,6 +34,8 @@
* Canonicalizes the language tag, or throws in case the tag is invalid.
*/
function canonicalizeLanguageTag(localeID) {
+ native function NativeJSCanonicalizeLanguageTag();
+
// typeof null is 'object', so we have to do an extra check.
if (typeof localeID !== 'string' && typeof localeID !== 'object' ||
localeID === null) {
@@ -50,7 +52,7 @@
// ICU bug filed - http://bugs.icu-project.org/trac/ticket/9265.
// TODO(cira): check if -u-kn-true-kc-true-kh-true still throws after
// upgrade to ICU 4.9.
- var tag = %CanonicalizeLanguageTag(localeString);
+ var tag = NativeJSCanonicalizeLanguageTag(localeString);
if (tag === 'invalid-tag') {
throw new RangeError('Invalid language tag: ' + localeString);
}
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index c68beb5..5fc5d88 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -174,7 +174,6 @@
DEFINE_bool(harmony_numeric_literals, false,
"enable harmony numeric literals (0o77, 0b11)")
DEFINE_bool(harmony_strings, false, "enable harmony string")
-DEFINE_bool(harmony_arrays, false, "enable harmony arrays")
DEFINE_bool(harmony, false, "enable all harmony features (except typeof)")
DEFINE_implication(harmony, harmony_scoping)
DEFINE_implication(harmony, harmony_modules)
@@ -186,7 +185,6 @@
DEFINE_implication(harmony, harmony_iteration)
DEFINE_implication(harmony, harmony_numeric_literals)
DEFINE_implication(harmony, harmony_strings)
-DEFINE_implication(harmony, harmony_arrays)
DEFINE_implication(harmony_modules, harmony_scoping)
DEFINE_implication(harmony_observation, harmony_collections)
// TODO(dslomov): add harmony => harmony_typed_arrays
@@ -267,6 +265,7 @@
DEFINE_bool(deoptimize_uncommon_cases, true, "deoptimize uncommon cases")
DEFINE_bool(polymorphic_inlining, true, "polymorphic inlining")
DEFINE_bool(use_osr, true, "use on-stack replacement")
+DEFINE_bool(idefs, false, "use informative definitions")
DEFINE_bool(array_bounds_checks_elimination, true,
"perform array bounds checks elimination")
DEFINE_bool(array_bounds_checks_hoisting, false,
@@ -310,6 +309,9 @@
"the length of the parallel compilation queue")
DEFINE_int(parallel_recompilation_delay, 0,
"artificial compilation delay in ms")
+DEFINE_bool(omit_prototype_checks_for_leaf_maps, true,
+ "do not emit prototype checks if all prototypes have leaf maps, "
+ "deoptimize the optimized code if the layout of the maps changes.")
DEFINE_bool(omit_map_checks_for_leaf_maps, true,
"do not emit check maps for constant values that have a leaf map, "
"deoptimize the optimized code if the layout of the maps changes.")
diff --git a/src/full-codegen.cc b/src/full-codegen.cc
index f5539e8..6d802e9 100644
--- a/src/full-codegen.cc
+++ b/src/full-codegen.cc
@@ -512,7 +512,7 @@
void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
- __ Push(reg);
+ __ push(reg);
}
@@ -530,7 +530,7 @@
void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
- __ Pop(result_register());
+ __ pop(result_register());
}
@@ -540,7 +540,7 @@
void FullCodeGenerator::TestContext::PlugTOS() const {
// For simplicity we always test the accumulator register.
- __ Pop(result_register());
+ __ pop(result_register());
codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
codegen()->DoTest(this);
}
@@ -1006,7 +1006,7 @@
VisitForAccumulatorValue(left);
// We want the value in the accumulator for the test, and on the stack in
// case we need it.
- __ Push(result_register());
+ __ push(result_register());
Label discard, restore;
if (is_logical_and) {
DoTest(left, &discard, &restore, &restore);
@@ -1014,7 +1014,7 @@
DoTest(left, &restore, &discard, &restore);
}
__ bind(&restore);
- __ Pop(result_register());
+ __ pop(result_register());
__ jmp(&done);
__ bind(&discard);
__ Drop(1);
@@ -1024,7 +1024,7 @@
VisitForAccumulatorValue(left);
// We want the value in the accumulator for the test, and on the stack in
// case we need it.
- __ Push(result_register());
+ __ push(result_register());
Label discard;
if (is_logical_and) {
DoTest(left, &discard, &done, &discard);
@@ -1416,7 +1416,7 @@
// Extend the context before executing the catch block.
{ Comment cmnt(masm_, "[ Extend catch context");
__ Push(stmt->variable()->name());
- __ Push(result_register());
+ __ push(result_register());
PushFunctionArgumentForContextAllocation();
__ CallRuntime(Runtime::kPushCatchContext, 3);
StoreToFrameField(StandardFrameConstants::kContextOffset,
@@ -1481,7 +1481,7 @@
// preserved by the finally block. Call the finally block and then
// rethrow the exception if it returns.
__ Call(&finally_entry);
- __ Push(result_register());
+ __ push(result_register());
__ CallRuntime(Runtime::kReThrow, 1);
// Finally block implementation.
diff --git a/src/full-codegen.h b/src/full-codegen.h
index af63aed..a9db54e 100644
--- a/src/full-codegen.h
+++ b/src/full-codegen.h
@@ -31,14 +31,11 @@
#include "v8.h"
#include "allocation.h"
-#include "assert-scope.h"
#include "ast.h"
#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "data-flow.h"
-#include "globals.h"
-#include "objects.h"
namespace v8 {
namespace internal {
@@ -139,64 +136,7 @@
#error Unsupported target architecture.
#endif
- class BackEdgeTableIterator {
- public:
- explicit BackEdgeTableIterator(Code* unoptimized) {
- ASSERT(unoptimized->kind() == Code::FUNCTION);
- instruction_start_ = unoptimized->instruction_start();
- cursor_ = instruction_start_ + unoptimized->back_edge_table_offset();
- ASSERT(cursor_ < instruction_start_ + unoptimized->instruction_size());
- table_length_ = Memory::uint32_at(cursor_);
- cursor_ += kTableLengthSize;
- end_ = cursor_ + table_length_ * kEntrySize;
- }
-
- bool Done() { return cursor_ >= end_; }
-
- void Next() {
- ASSERT(!Done());
- cursor_ += kEntrySize;
- }
-
- BailoutId ast_id() {
- ASSERT(!Done());
- return BailoutId(static_cast<int>(
- Memory::uint32_at(cursor_ + kAstIdOffset)));
- }
-
- uint32_t loop_depth() {
- ASSERT(!Done());
- return Memory::uint32_at(cursor_ + kLoopDepthOffset);
- }
-
- uint32_t pc_offset() {
- ASSERT(!Done());
- return Memory::uint32_at(cursor_ + kPcOffsetOffset);
- }
-
- Address pc() {
- ASSERT(!Done());
- return instruction_start_ + pc_offset();
- }
-
- uint32_t table_length() { return table_length_; }
-
- private:
- static const int kTableLengthSize = kIntSize;
- static const int kAstIdOffset = 0 * kIntSize;
- static const int kPcOffsetOffset = 1 * kIntSize;
- static const int kLoopDepthOffset = 2 * kIntSize;
- static const int kEntrySize = 3 * kIntSize;
-
- Address cursor_;
- Address end_;
- Address instruction_start_;
- uint32_t table_length_;
- DisallowHeapAllocation no_gc_while_iterating_over_raw_addresses_;
-
- DISALLOW_COPY_AND_ASSIGN(BackEdgeTableIterator);
- };
-
+ static const int kBackEdgeEntrySize = 3 * kIntSize;
private:
class Breakable;
@@ -685,6 +625,8 @@
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
+ void EmitUnaryOperation(UnaryOperation* expr, const char* comment);
+
void VisitComma(BinaryOperation* expr);
void VisitLogicalExpression(BinaryOperation* expr);
void VisitArithmeticExpression(BinaryOperation* expr);
diff --git a/src/global-handles.cc b/src/global-handles.cc
index 5df9dd4..88ebe31 100644
--- a/src/global-handles.cc
+++ b/src/global-handles.cc
@@ -71,7 +71,6 @@
STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
Internals::kNodeStateMask);
STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
- STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue);
STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
Internals::kNodeIsIndependentShift);
@@ -1019,68 +1018,4 @@
}
-EternalHandles::EternalHandles() : size_(0) {
- STATIC_ASSERT(v8::kUninitializedEternalIndex == kInvalidIndex);
- for (unsigned i = 0; i < ARRAY_SIZE(singleton_handles_); i++) {
- singleton_handles_[i] = kInvalidIndex;
- }
-}
-
-
-EternalHandles::~EternalHandles() {
- for (int i = 0; i < blocks_.length(); i++) delete[] blocks_[i];
-}
-
-
-void EternalHandles::IterateAllRoots(ObjectVisitor* visitor) {
- int limit = size_;
- for (int i = 0; i < blocks_.length(); i++) {
- ASSERT(limit > 0);
- Object** block = blocks_[i];
- visitor->VisitPointers(block, block + Min(limit, kSize));
- limit -= kSize;
- }
-}
-
-
-void EternalHandles::IterateNewSpaceRoots(ObjectVisitor* visitor) {
- for (int i = 0; i < new_space_indices_.length(); i++) {
- visitor->VisitPointer(GetLocation(new_space_indices_[i]));
- }
-}
-
-
-void EternalHandles::PostGarbageCollectionProcessing(Heap* heap) {
- int last = 0;
- for (int i = 0; i < new_space_indices_.length(); i++) {
- int index = new_space_indices_[i];
- if (heap->InNewSpace(*GetLocation(index))) {
- new_space_indices_[last++] = index;
- }
- }
- new_space_indices_.Rewind(last);
-}
-
-
-int EternalHandles::Create(Isolate* isolate, Object* object) {
- if (object == NULL) return kInvalidIndex;
- ASSERT_NE(isolate->heap()->the_hole_value(), object);
- int block = size_ >> kShift;
- int offset = size_ & kMask;
- // need to resize
- if (offset == 0) {
- Object** next_block = new Object*[kSize];
- Object* the_hole = isolate->heap()->the_hole_value();
- MemsetPointer(next_block, the_hole, kSize);
- blocks_.Add(next_block);
- }
- ASSERT_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
- blocks_[block][offset] = object;
- if (isolate->heap()->InNewSpace(object)) {
- new_space_indices_.Add(size_);
- }
- return size_++;
-}
-
-
} } // namespace v8::internal
diff --git a/src/global-handles.h b/src/global-handles.h
index 5a4ad13..cd75133 100644
--- a/src/global-handles.h
+++ b/src/global-handles.h
@@ -31,7 +31,6 @@
#include "../include/v8.h"
#include "../include/v8-profiler.h"
-#include "handles.h"
#include "list.h"
#include "v8utils.h"
@@ -332,76 +331,6 @@
};
-class EternalHandles {
- public:
- enum SingletonHandle {
- I18N_TEMPLATE_ONE,
- I18N_TEMPLATE_TWO,
-
- NUMBER_OF_SINGLETON_HANDLES
- };
-
- EternalHandles();
- ~EternalHandles();
-
- int NumberOfHandles() { return size_; }
-
- // Create an EternalHandle, returning the index.
- int Create(Isolate* isolate, Object* object);
-
- // Grab the handle for an existing EternalHandle.
- inline Handle<Object> Get(int index) {
- return Handle<Object>(GetLocation(index));
- }
-
- // Grab the handle for an existing SingletonHandle.
- inline Handle<Object> GetSingleton(SingletonHandle singleton) {
- ASSERT(Exists(singleton));
- return Get(singleton_handles_[singleton]);
- }
-
- // Checks whether a SingletonHandle has been assigned.
- inline bool Exists(SingletonHandle singleton) {
- return singleton_handles_[singleton] != kInvalidIndex;
- }
-
- // Assign a SingletonHandle to an empty slot and returns the handle.
- Handle<Object> CreateSingleton(Isolate* isolate,
- Object* object,
- SingletonHandle singleton) {
- ASSERT(singleton_handles_[singleton] == kInvalidIndex);
- singleton_handles_[singleton] = Create(isolate, object);
- return Get(singleton_handles_[singleton]);
- }
-
- // Iterates over all handles.
- void IterateAllRoots(ObjectVisitor* visitor);
- // Iterates over all handles which might be in new space.
- void IterateNewSpaceRoots(ObjectVisitor* visitor);
- // Rebuilds new space list.
- void PostGarbageCollectionProcessing(Heap* heap);
-
- private:
- static const int kInvalidIndex = -1;
- static const int kShift = 8;
- static const int kSize = 1 << kShift;
- static const int kMask = 0xff;
-
- // Gets the slot for an index
- inline Object** GetLocation(int index) {
- ASSERT(index >= 0 && index < size_);
- return &blocks_[index >> kShift][index & kMask];
- }
-
- int size_;
- List<Object**> blocks_;
- List<int> new_space_indices_;
- int singleton_handles_[NUMBER_OF_SINGLETON_HANDLES];
-
- DISALLOW_COPY_AND_ASSIGN(EternalHandles);
-};
-
-
} } // namespace v8::internal
#endif // V8_GLOBAL_HANDLES_H_
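
The removed EternalHandles class addressed slots with shift/mask arithmetic:
kShift = 8 gives 256-slot blocks, so index >> kShift selects the block and
index & kMask the offset inside it. A standalone sketch of that addressing:

    #include <cassert>
    #include <cstdio>

    static const int kShift = 8;
    static const int kSize = 1 << kShift;  // 256 slots per block
    static const int kMask = 0xff;

    int main() {
      int index = 517;              // some handle index
      int block = index >> kShift;  // 517 / 256 == block 2
      int offset = index & kMask;   // 517 % 256 == offset 5
      assert(block * kSize + offset == index);
      printf("block %d, offset %d\n", block, offset);
      return 0;
    }
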
diff --git a/src/harmony-array.js b/src/harmony-array.js
deleted file mode 100644
index e440299..0000000
--- a/src/harmony-array.js
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright 2013 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-'use strict';
-
-// This file relies on the fact that the following declaration has been made
-// in runtime.js:
-// var $Array = global.Array;
-
-// -------------------------------------------------------------------
-
-// ES6 draft 07-15-13, section 15.4.3.23
-function ArrayFind(predicate /* thisArg */) { // length == 1
- if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
- throw MakeTypeError("called_on_null_or_undefined",
- ["Array.prototype.find"]);
- }
-
- var array = ToObject(this);
- var length = ToInteger(array.length);
-
- if (!IS_SPEC_FUNCTION(predicate)) {
- throw MakeTypeError('called_non_callable', [predicate]);
- }
-
- var thisArg;
- if (%_ArgumentsLength() > 1) {
- thisArg = %_Arguments(1);
- }
-
- if (IS_NULL_OR_UNDEFINED(thisArg)) {
- thisArg = %GetDefaultReceiver(predicate) || thisArg;
- } else if (!IS_SPEC_OBJECT(thisArg) && %IsClassicModeFunction(predicate)) {
- thisArg = ToObject(thisArg);
- }
-
- for (var i = 0; i < length; i++) {
- if (i in array) {
- var element = array[i];
- if (%_CallFunction(thisArg, element, i, array, predicate)) {
- return element;
- }
- }
- }
-
- return;
-}
-
-
-// ES6 draft 07-15-13, section 15.4.3.24
-function ArrayFindIndex(predicate /* thisArg */) { // length == 1
- if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
- throw MakeTypeError("called_on_null_or_undefined",
- ["Array.prototype.findIndex"]);
- }
-
- var array = ToObject(this);
- var length = ToInteger(array.length);
-
- if (!IS_SPEC_FUNCTION(predicate)) {
- throw MakeTypeError('called_non_callable', [predicate]);
- }
-
- var thisArg;
- if (%_ArgumentsLength() > 1) {
- thisArg = %_Arguments(1);
- }
-
- if (IS_NULL_OR_UNDEFINED(thisArg)) {
- thisArg = %GetDefaultReceiver(predicate) || thisArg;
- } else if (!IS_SPEC_OBJECT(thisArg) && %IsClassicModeFunction(predicate)) {
- thisArg = ToObject(thisArg);
- }
-
- for (var i = 0; i < length; i++) {
- if (i in array) {
- var element = array[i];
- if (%_CallFunction(thisArg, element, i, array, predicate)) {
- return i;
- }
- }
- }
-
- return -1;
-}
-
-
-// -------------------------------------------------------------------
-
-function HarmonyArrayExtendArrayPrototype() {
- %CheckIsBootstrapping();
-
- // Set up the non-enumerable functions on the Array prototype object.
- InstallFunctions($Array.prototype, DONT_ENUM, $Array(
- "find", ArrayFind,
- "findIndex", ArrayFindIndex
- ));
-}
-
-HarmonyArrayExtendArrayPrototype();
\ No newline at end of file
diff --git a/src/heap-snapshot-generator.cc b/src/heap-snapshot-generator.cc
index 1c8a7b3..9f9f84a 100644
--- a/src/heap-snapshot-generator.cc
+++ b/src/heap-snapshot-generator.cc
@@ -369,12 +369,6 @@
HeapObjectsMap::kGcRootsFirstSubrootId +
VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
-
-static bool AddressesMatch(void* key1, void* key2) {
- return key1 == key2;
-}
-
-
HeapObjectsMap::HeapObjectsMap(Heap* heap)
: next_id_(kFirstAvailableObjectId),
entries_map_(AddressesMatch),
@@ -399,20 +393,19 @@
ASSERT(to != NULL);
ASSERT(from != NULL);
if (from == to) return;
- void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
+ void* from_value = entries_map_.Remove(from, AddressHash(from));
if (from_value == NULL) {
// It may occur that some untracked object moves to an address X and there
// is a tracked object at that address. In this case we should remove the
// entry as we know that the object has died.
- void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
+ void* to_value = entries_map_.Remove(to, AddressHash(to));
if (to_value != NULL) {
int to_entry_info_index =
static_cast<int>(reinterpret_cast<intptr_t>(to_value));
entries_.at(to_entry_info_index).addr = NULL;
}
} else {
- HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
- true);
+ HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
if (to_entry->value != NULL) {
// We found the existing entry with to address for an old object.
// Without this operation we will have two EntryInfo's with the same
@@ -432,8 +425,7 @@
SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
- HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
- false);
+ HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
if (entry == NULL) return 0;
int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
EntryInfo& entry_info = entries_.at(entry_index);
@@ -445,8 +437,7 @@
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
unsigned int size) {
ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
- HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
- true);
+ HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
if (entry->value != NULL) {
int entry_index =
static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
@@ -541,14 +532,13 @@
}
entries_.at(first_free_entry).accessed = false;
HashMap::Entry* entry = entries_map_.Lookup(
- entry_info.addr, ComputePointerHash(entry_info.addr), false);
+ entry_info.addr, AddressHash(entry_info.addr), false);
ASSERT(entry);
entry->value = reinterpret_cast<void*>(first_free_entry);
++first_free_entry;
} else {
if (entry_info.addr) {
- entries_map_.Remove(entry_info.addr,
- ComputePointerHash(entry_info.addr));
+ entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
}
}
}
diff --git a/src/heap-snapshot-generator.h b/src/heap-snapshot-generator.h
index cea9958..31d8088 100644
--- a/src/heap-snapshot-generator.h
+++ b/src/heap-snapshot-generator.h
@@ -266,6 +266,16 @@
void UpdateHeapObjectsMap();
void RemoveDeadEntries();
+ static bool AddressesMatch(void* key1, void* key2) {
+ return key1 == key2;
+ }
+
+ static uint32_t AddressHash(Address addr) {
+ return ComputeIntegerHash(
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)),
+ v8::internal::kZeroHashSeed);
+ }
+
SnapshotObjectId next_id_;
HashMap entries_map_;
List<EntryInfo> entries_;
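The AddressHash helper restored above keeps the hash computation next to its HashMap: the pointer is truncated to 32 bits and mixed with a zero seed. A minimal standalone sketch of the same shape; the mix function is a Wang-style integer hash used here only as an illustrative stand-in for V8's ComputeIntegerHash:

    #include <stdint.h>

    // Wang-style 32-bit mix; a stand-in for ComputeIntegerHash(key, 0).
    static uint32_t IntegerHash(uint32_t key) {
      key = ~key + (key << 15);
      key = key ^ (key >> 12);
      key = key + (key << 2);
      key = key ^ (key >> 4);
      key = key * 2057;
      key = key ^ (key >> 16);
      return key;
    }

    // Mirrors AddressHash: drop the high pointer bits, then mix.
    static uint32_t HashAddress(const void* addr) {
      return IntegerHash(
          static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)));
    }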
diff --git a/src/heap.cc b/src/heap.cc
index 53088e2..c2a2707 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -1014,8 +1014,6 @@
}
gc_post_processing_depth_--;
- isolate_->eternal_handles()->PostGarbageCollectionProcessing(this);
-
// Update relocatables.
Relocatable::PostGarbageCollectionProcessing();
@@ -2008,6 +2006,7 @@
private:
enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
+ enum SizeRestriction { SMALL, UNKNOWN_SIZE };
static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
bool should_record = false;
@@ -2059,12 +2058,15 @@
}
- template<ObjectContents object_contents, int alignment>
+ template<ObjectContents object_contents,
+ SizeRestriction size_restriction,
+ int alignment>
static inline void EvacuateObject(Map* map,
HeapObject** slot,
HeapObject* object,
int object_size) {
- SLOW_ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
+ SLOW_ASSERT((size_restriction != SMALL) ||
+ (object_size <= Page::kMaxNonCodeHeapObjectSize));
SLOW_ASSERT(object->Size() == object_size);
int allocation_size = object_size;
@@ -2077,11 +2079,17 @@
if (heap->ShouldBePromoted(object->address(), object_size)) {
MaybeObject* maybe_result;
- if (object_contents == DATA_OBJECT) {
- maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
+ if ((size_restriction != SMALL) &&
+ (allocation_size > Page::kMaxNonCodeHeapObjectSize)) {
+ maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
+ NOT_EXECUTABLE);
} else {
- maybe_result =
- heap->old_pointer_space()->AllocateRaw(allocation_size);
+ if (object_contents == DATA_OBJECT) {
+ maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
+ } else {
+ maybe_result =
+ heap->old_pointer_space()->AllocateRaw(allocation_size);
+ }
}
Object* result = NULL; // Initialization to please compiler.
@@ -2155,8 +2163,10 @@
HeapObject** slot,
HeapObject* object) {
int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
- EvacuateObject<POINTER_OBJECT, kObjectAlignment>(
- map, slot, object, object_size);
+ EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map,
+ slot,
+ object,
+ object_size);
}
@@ -2165,8 +2175,11 @@
HeapObject* object) {
int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
int object_size = FixedDoubleArray::SizeFor(length);
- EvacuateObject<DATA_OBJECT, kDoubleAlignment>(
- map, slot, object, object_size);
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>(
+ map,
+ slot,
+ object,
+ object_size);
}
@@ -2174,7 +2187,7 @@
HeapObject** slot,
HeapObject* object) {
int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
- EvacuateObject<DATA_OBJECT, kObjectAlignment>(
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
map, slot, object, object_size);
}
@@ -2184,7 +2197,7 @@
HeapObject* object) {
int object_size = SeqOneByteString::cast(object)->
SeqOneByteStringSize(map->instance_type());
- EvacuateObject<DATA_OBJECT, kObjectAlignment>(
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
map, slot, object, object_size);
}
@@ -2194,7 +2207,7 @@
HeapObject* object) {
int object_size = SeqTwoByteString::cast(object)->
SeqTwoByteStringSize(map->instance_type());
- EvacuateObject<DATA_OBJECT, kObjectAlignment>(
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
map, slot, object, object_size);
}
@@ -2238,7 +2251,7 @@
}
int object_size = ConsString::kSize;
- EvacuateObject<POINTER_OBJECT, kObjectAlignment>(
+ EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>(
map, slot, object, object_size);
}
@@ -2249,7 +2262,7 @@
static inline void VisitSpecialized(Map* map,
HeapObject** slot,
HeapObject* object) {
- EvacuateObject<object_contents, kObjectAlignment>(
+ EvacuateObject<object_contents, SMALL, kObjectAlignment>(
map, slot, object, object_size);
}
@@ -2257,7 +2270,7 @@
HeapObject** slot,
HeapObject* object) {
int object_size = map->instance_size();
- EvacuateObject<object_contents, kObjectAlignment>(
+ EvacuateObject<object_contents, SMALL, kObjectAlignment>(
map, slot, object, object_size);
}
};
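The EvacuateObject hunks above reintroduce the SMALL/UNKNOWN_SIZE distinction: during promotion, an object whose size may exceed the per-page limit is routed to the large-object space, while small objects keep going to the old data or old pointer space. A self-contained sketch of just that decision, with the size limit passed in as a plain parameter:

    #include <cassert>

    enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
    enum SizeRestriction { SMALL, UNKNOWN_SIZE };
    enum TargetSpace { OLD_DATA_SPACE, OLD_POINTER_SPACE, LO_SPACE };

    // Same branch structure as the promotion path in EvacuateObject.
    TargetSpace ChooseTarget(ObjectContents contents,
                             SizeRestriction restriction,
                             int allocation_size,
                             int max_non_code_object_size) {
      if (restriction != SMALL &&
          allocation_size > max_non_code_object_size) {
        return LO_SPACE;
      }
      return contents == DATA_OBJECT ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
    }

    int main() {
      // A large FixedArray is promoted into the large-object space.
      assert(ChooseTarget(POINTER_OBJECT, UNKNOWN_SIZE, 1 << 20, 1 << 13)
             == LO_SPACE);
      // ConsString is always SMALL, so it never takes the lo_space path.
      assert(ChooseTarget(POINTER_OBJECT, SMALL, 64, 1 << 13)
             == OLD_POINTER_SPACE);
    }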
@@ -3205,6 +3218,9 @@
}
set_observed_symbol(Symbol::cast(obj));
+ set_i18n_template_one(the_hole_value());
+ set_i18n_template_two(the_hole_value());
+
// Handling of script id generation is in Factory::NewScript.
set_last_script_id(Smi::FromInt(v8::Script::kNoScriptId));
@@ -6592,14 +6608,6 @@
}
v->Synchronize(VisitorSynchronization::kGlobalHandles);
- // Iterate over eternal handles.
- if (mode == VISIT_ALL_IN_SCAVENGE) {
- isolate_->eternal_handles()->IterateNewSpaceRoots(v);
- } else {
- isolate_->eternal_handles()->IterateAllRoots(v);
- }
- v->Synchronize(VisitorSynchronization::kEternalHandles);
-
// Iterate over pointers being held by inactive threads.
isolate_->thread_manager()->Iterate(v);
v->Synchronize(VisitorSynchronization::kThreadManager);
diff --git a/src/heap.h b/src/heap.h
index 1b6bf8e..672b8c1 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -189,7 +189,9 @@
V(Symbol, elements_transition_symbol, ElementsTransitionSymbol) \
V(SeededNumberDictionary, empty_slow_element_dictionary, \
EmptySlowElementDictionary) \
- V(Symbol, observed_symbol, ObservedSymbol)
+ V(Symbol, observed_symbol, ObservedSymbol) \
+ V(HeapObject, i18n_template_one, I18nTemplateOne) \
+ V(HeapObject, i18n_template_two, I18nTemplateTwo)
#define ROOT_LIST(V) \
STRONG_ROOT_LIST(V) \
@@ -1300,6 +1302,12 @@
ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
global_gc_epilogue_callback_ = callback;
}
+ void SetI18nTemplateOne(ObjectTemplateInfo* tmpl) {
+ set_i18n_template_one(tmpl);
+ }
+ void SetI18nTemplateTwo(ObjectTemplateInfo* tmpl) {
+ set_i18n_template_two(tmpl);
+ }
// Heap root getters. We have versions with and without type::cast() here.
// You can't use type::cast during GC because the assert fails.
diff --git a/src/hydrogen-dehoist.cc b/src/hydrogen-dehoist.cc
index 67e6718..696d22c 100644
--- a/src/hydrogen-dehoist.cc
+++ b/src/hydrogen-dehoist.cc
@@ -38,7 +38,7 @@
HConstant* constant;
HValue* subexpression;
HBinaryOperation* binary_operation = HBinaryOperation::cast(index);
- if (binary_operation->left()->IsConstant() && index->IsAdd()) {
+ if (binary_operation->left()->IsConstant()) {
subexpression = binary_operation->right();
constant = HConstant::cast(binary_operation->left());
} else if (binary_operation->right()->IsConstant()) {
diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc
index 997b7c2..3eb4aa6 100644
--- a/src/hydrogen-instructions.cc
+++ b/src/hydrogen-instructions.cc
@@ -149,6 +149,116 @@
}
+// This method is recursive, but it is guaranteed to terminate because
+// RedefinedOperand() always dominates "this".
+bool HValue::IsRelationTrue(NumericRelation relation,
+ HValue* other,
+ int offset,
+ int scale) {
+ if (this == other) {
+ return scale == 0 && relation.IsExtendable(offset);
+ }
+
+ // Test the direct relation.
+ if (IsRelationTrueInternal(relation, other, offset, scale)) return true;
+
+  // If scale is 0, try the reversed relation.
+ if (scale == 0 &&
+ // TODO(mmassi): do we need the full, recursive IsRelationTrue?
+ other->IsRelationTrueInternal(relation.Reversed(), this, -offset)) {
+ return true;
+ }
+
+ // Try decomposition (but do not accept scaled compounds).
+ DecompositionResult decomposition;
+ if (TryDecompose(&decomposition) &&
+ decomposition.scale() == 0 &&
+ decomposition.base()->IsRelationTrue(relation, other,
+ offset + decomposition.offset(),
+ scale)) {
+ return true;
+ }
+
+ // Pass the request to the redefined value.
+ HValue* redefined = RedefinedOperand();
+ return redefined != NULL && redefined->IsRelationTrue(relation, other,
+ offset, scale);
+}
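To make the decomposition step concrete: when an index is the expression base + c, proving "(index + offset) rel other" reduces to proving the relation for base with offset + c, and a directly known "x < y" extends to any non-positive offset (IsExtendable). A toy model of that reduction; Node and its fields are illustrative, not the Hydrogen classes:

    #include <cassert>
    #include <cstddef>

    struct Node {
      const Node* base;             // non-null when this node is base + constant
      int constant;
      const Node* known_less_than;  // a value this node is directly known to be below
    };

    // Can we prove "node + offset < bound"?
    bool ProvablyLessThan(const Node* node, const Node* bound, int offset) {
      if (node->known_less_than == bound && offset <= 0) return true;
      if (node->base != NULL)
        return ProvablyLessThan(node->base, bound, offset + node->constant);
      return false;
    }

    int main() {
      Node len = { NULL, 0, NULL };
      Node i   = { NULL, 0, &len };  // i < len is known
      Node im1 = { &i, -1, NULL };   // im1 is i + (-1)
      assert(ProvablyLessThan(&im1, &len, 0));  // hence i - 1 < len
    }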
+
+
+bool HValue::TryGuaranteeRange(HValue* upper_bound) {
+ RangeEvaluationContext context = RangeEvaluationContext(this, upper_bound);
+ TryGuaranteeRangeRecursive(&context);
+ bool result = context.is_range_satisfied();
+ if (result) {
+ context.lower_bound_guarantee()->SetResponsibilityForRange(DIRECTION_LOWER);
+ context.upper_bound_guarantee()->SetResponsibilityForRange(DIRECTION_UPPER);
+ }
+ return result;
+}
+
+
+void HValue::TryGuaranteeRangeRecursive(RangeEvaluationContext* context) {
+ // Check if we already know that this value satisfies the lower bound.
+ if (context->lower_bound_guarantee() == NULL) {
+ if (IsRelationTrueInternal(NumericRelation::Ge(), context->lower_bound(),
+ context->offset(), context->scale())) {
+ context->set_lower_bound_guarantee(this);
+ }
+ }
+
+ // Check if we already know that this value satisfies the upper bound.
+ if (context->upper_bound_guarantee() == NULL) {
+ if (IsRelationTrueInternal(NumericRelation::Lt(), context->upper_bound(),
+ context->offset(), context->scale()) ||
+ (context->scale() == 0 &&
+ context->upper_bound()->IsRelationTrue(NumericRelation::Gt(),
+ this, -context->offset()))) {
+ context->set_upper_bound_guarantee(this);
+ }
+ }
+
+ if (context->is_range_satisfied()) return;
+
+ // See if our RedefinedOperand() satisfies the constraints.
+ if (RedefinedOperand() != NULL) {
+ RedefinedOperand()->TryGuaranteeRangeRecursive(context);
+ }
+ if (context->is_range_satisfied()) return;
+
+ // See if the constraints can be satisfied by decomposition.
+ DecompositionResult decomposition;
+ if (TryDecompose(&decomposition)) {
+ context->swap_candidate(&decomposition);
+ context->candidate()->TryGuaranteeRangeRecursive(context);
+ context->swap_candidate(&decomposition);
+ }
+ if (context->is_range_satisfied()) return;
+
+  // Try to modify this to satisfy the constraint.
+  TryGuaranteeRangeChanging(context);
+}
+
+
+RangeEvaluationContext::RangeEvaluationContext(HValue* value, HValue* upper)
+ : lower_bound_(upper->block()->graph()->GetConstant0()),
+ lower_bound_guarantee_(NULL),
+ candidate_(value),
+ upper_bound_(upper),
+ upper_bound_guarantee_(NULL),
+ offset_(0),
+ scale_(0) {
+}
+
+
+HValue* RangeEvaluationContext::ConvertGuarantee(HValue* guarantee) {
+ return guarantee->IsBoundsCheckBaseIndexInformation()
+ ? HBoundsCheckBaseIndexInformation::cast(guarantee)->bounds_check()
+ : guarantee;
+}
+
+
static int32_t ConvertAndSetOverflow(Representation r,
int64_t result,
bool* overflow) {
@@ -374,6 +484,55 @@
}
+bool HValue::Dominates(HValue* dominator, HValue* dominated) {
+ if (dominator->block() != dominated->block()) {
+ // If they are in different blocks we can use the dominance relation
+ // between the blocks.
+ return dominator->block()->Dominates(dominated->block());
+ } else {
+ // Otherwise we must see which instruction comes first, considering
+ // that phis always precede regular instructions.
+ if (dominator->IsInstruction()) {
+ if (dominated->IsInstruction()) {
+ for (HInstruction* next = HInstruction::cast(dominator)->next();
+ next != NULL;
+ next = next->next()) {
+ if (next == dominated) return true;
+ }
+ return false;
+ } else if (dominated->IsPhi()) {
+ return false;
+ } else {
+ UNREACHABLE();
+ }
+ } else if (dominator->IsPhi()) {
+ if (dominated->IsInstruction()) {
+ return true;
+ } else {
+ // We cannot compare which phi comes first.
+ UNREACHABLE();
+ }
+ } else {
+ UNREACHABLE();
+ }
+ return false;
+ }
+}
+
+
+bool HValue::TestDominanceUsingProcessedFlag(HValue* dominator,
+ HValue* dominated) {
+ if (dominator->block() != dominated->block()) {
+ return dominator->block()->Dominates(dominated->block());
+ } else {
+ // If both arguments are in the same block we check if dominator is a phi
+ // or if dominated has not already been processed: in either case we know
+ // that dominator precedes dominated.
+ return dominator->IsPhi() || !dominated->CheckFlag(kIDefsProcessingDone);
+ }
+}
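Within a single block, the slow Dominates test above is just a forward walk of the instruction list starting at the candidate dominator. The same check in miniature, with Instr standing in for HInstruction:

    #include <cstddef>

    struct Instr { Instr* next; };

    // In one block, an instruction dominates another iff it comes first.
    bool ComesBefore(const Instr* a, const Instr* b) {
      for (const Instr* cur = a->next; cur != NULL; cur = cur->next) {
        if (cur == b) return true;
      }
      return false;
    }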
+
+
bool HValue::IsDefinedAfter(HBasicBlock* other) const {
return block()->block_id() > other->block_id();
}
@@ -801,6 +960,58 @@
#endif
+HNumericConstraint* HNumericConstraint::AddToGraph(
+ HValue* constrained_value,
+ NumericRelation relation,
+ HValue* related_value,
+ HInstruction* insertion_point) {
+ if (insertion_point == NULL) {
+ if (constrained_value->IsInstruction()) {
+ insertion_point = HInstruction::cast(constrained_value);
+ } else if (constrained_value->IsPhi()) {
+ insertion_point = constrained_value->block()->first();
+ } else {
+ UNREACHABLE();
+ }
+ }
+ HNumericConstraint* result =
+ new(insertion_point->block()->zone()) HNumericConstraint(
+ constrained_value, relation, related_value);
+ result->InsertAfter(insertion_point);
+ return result;
+}
+
+
+void HNumericConstraint::PrintDataTo(StringStream* stream) {
+ stream->Add("(");
+ constrained_value()->PrintNameTo(stream);
+ stream->Add(" %s ", relation().Mnemonic());
+ related_value()->PrintNameTo(stream);
+ stream->Add(")");
+}
+
+
+HInductionVariableAnnotation* HInductionVariableAnnotation::AddToGraph(
+ HPhi* phi,
+ NumericRelation relation,
+ int operand_index) {
+ HInductionVariableAnnotation* result =
+ new(phi->block()->zone()) HInductionVariableAnnotation(phi, relation,
+ operand_index);
+ result->InsertAfter(phi->block()->first());
+ return result;
+}
+
+
+void HInductionVariableAnnotation::PrintDataTo(StringStream* stream) {
+ stream->Add("(");
+ RedefinedOperand()->PrintNameTo(stream);
+ stream->Add(" %s ", relation().Mnemonic());
+ induction_base()->PrintNameTo(stream);
+ stream->Add(")");
+}
+
+
void HDummyUse::PrintDataTo(StringStream* stream) {
value()->PrintNameTo(stream);
}
@@ -827,6 +1038,40 @@
}
+void HBoundsCheck::TryGuaranteeRangeChanging(RangeEvaluationContext* context) {
+ if (context->candidate()->ActualValue() != base()->ActualValue() ||
+ context->scale() < scale()) {
+ return;
+ }
+
+ // TODO(mmassi)
+ // Instead of checking for "same basic block" we should check for
+ // "dominates and postdominates".
+ if (context->upper_bound() == length() &&
+ context->lower_bound_guarantee() != NULL &&
+ context->lower_bound_guarantee() != this &&
+ context->lower_bound_guarantee()->block() != block() &&
+ offset() < context->offset() &&
+ index_can_increase() &&
+ context->upper_bound_guarantee() == NULL) {
+ offset_ = context->offset();
+ SetResponsibilityForRange(DIRECTION_UPPER);
+ context->set_upper_bound_guarantee(this);
+ isolate()->counters()->bounds_checks_eliminated()->Increment();
+ } else if (context->upper_bound_guarantee() != NULL &&
+ context->upper_bound_guarantee() != this &&
+ context->upper_bound_guarantee()->block() != block() &&
+ offset() > context->offset() &&
+ index_can_decrease() &&
+ context->lower_bound_guarantee() == NULL) {
+ offset_ = context->offset();
+ SetResponsibilityForRange(DIRECTION_LOWER);
+ context->set_lower_bound_guarantee(this);
+ isolate()->counters()->bounds_checks_eliminated()->Increment();
+ }
+}
+
+
void HBoundsCheck::ApplyIndexChange() {
if (skip_check()) return;
@@ -874,6 +1119,40 @@
base_ = NULL;
offset_ = 0;
scale_ = 0;
+ responsibility_direction_ = DIRECTION_NONE;
+}
+
+
+void HBoundsCheck::AddInformativeDefinitions() {
+ // TODO(mmassi): Executing this code during AddInformativeDefinitions
+ // is a hack. Move it to some other HPhase.
+ if (FLAG_array_bounds_checks_elimination) {
+ if (index()->TryGuaranteeRange(length())) {
+ set_skip_check();
+ }
+ if (DetectCompoundIndex()) {
+ HBoundsCheckBaseIndexInformation* base_index_info =
+ new(block()->graph()->zone())
+ HBoundsCheckBaseIndexInformation(this);
+ base_index_info->InsertAfter(this);
+ }
+ }
+}
+
+
+bool HBoundsCheck::IsRelationTrueInternal(NumericRelation relation,
+ HValue* related_value,
+ int offset,
+ int scale) {
+ if (related_value == length()) {
+ // A HBoundsCheck is smaller than the length it compared against.
+ return NumericRelation::Lt().CompoundImplies(relation, 0, 0, offset, scale);
+ } else if (related_value == block()->graph()->GetConstant0()) {
+ // A HBoundsCheck is greater than or equal to zero.
+ return NumericRelation::Ge().CompoundImplies(relation, 0, 0, offset, scale);
+ } else {
+ return false;
+ }
}
@@ -916,6 +1195,25 @@
}
+bool HBoundsCheckBaseIndexInformation::IsRelationTrueInternal(
+ NumericRelation relation,
+ HValue* related_value,
+ int offset,
+ int scale) {
+ if (related_value == bounds_check()->length()) {
+ return NumericRelation::Lt().CompoundImplies(
+ relation,
+ bounds_check()->offset(), bounds_check()->scale(), offset, scale);
+ } else if (related_value == block()->graph()->GetConstant0()) {
+ return NumericRelation::Ge().CompoundImplies(
+ relation,
+ bounds_check()->offset(), bounds_check()->scale(), offset, scale);
+ } else {
+ return false;
+ }
+}
+
+
void HBoundsCheckBaseIndexInformation::PrintDataTo(StringStream* stream) {
stream->Add("base: ");
base_index()->PrintNameTo(stream);
@@ -1155,29 +1453,6 @@
}
-static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
- if (!l->EqualsInteger32Constant(~0)) return false;
- *negated = r;
- return true;
-}
-
-
-static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
- if (!instr->IsBitwise()) return false;
- HBitwise* b = HBitwise::cast(instr);
- return (b->op() == Token::BIT_XOR) &&
- (MatchLeftIsOnes(b->left(), b->right(), negated) ||
- MatchLeftIsOnes(b->right(), b->left(), negated));
-}
-
-
-static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
- HValue* negated;
- return MatchNegationViaXor(instr, &negated) &&
- MatchNegationViaXor(negated, arg);
-}
-
-
HValue* HBitwise::Canonicalize() {
if (!representation().IsSmiOrInteger32()) return this;
// If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
@@ -1190,10 +1465,18 @@
!left()->CheckFlag(kUint32)) {
return left();
}
- // Optimize double negation, a common pattern used for ToInt32(x).
- HValue* arg;
- if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
- return arg;
+ return this;
+}
+
+
+HValue* HBitNot::Canonicalize() {
+ // Optimize ~~x, a common pattern used for ToInt32(x).
+ if (value()->IsBitNot()) {
+ HValue* result = HBitNot::cast(value())->value();
+ ASSERT(result->representation().IsInteger32());
+ if (!result->CheckFlag(kUint32)) {
+ return result;
+ }
}
return this;
}
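HBitNot::Canonicalize restores the direct fold of ~~x to x: two bitwise NOTs on an int32 cancel, which is why `~~x` is a common JavaScript idiom for ToInt32. Per the code above, the fold is skipped when the inner value carries the kUint32 flag. The identity, checked standalone:

    #include <cassert>
    #include <stdint.h>

    int32_t DoubleBitNot(int32_t x) { return ~~x; }

    int main() {
      assert(DoubleBitNot(42) == 42);  // ~~x == x for every int32
      assert(DoubleBitNot(-1) == -1);
    }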
@@ -1404,10 +1687,10 @@
// for which the map is known.
if (HasNoUses() && dominator->IsStoreNamedField()) {
HStoreNamedField* store = HStoreNamedField::cast(dominator);
- if (!store->has_transition() || store->object() != value()) return;
- HConstant* transition = HConstant::cast(store->transition());
+ UniqueValueId map_unique_id = store->transition_unique_id();
+ if (!map_unique_id.IsInitialized() || store->object() != value()) return;
for (int i = 0; i < map_set()->length(); i++) {
- if (transition->UniqueValueIdsMatch(map_unique_ids_.at(i))) {
+ if (map_unique_id == map_unique_ids_.at(i)) {
DeleteAndReplaceWith(NULL);
return;
}
@@ -1458,6 +1741,13 @@
}
+void HCheckPrototypeMaps::PrintDataTo(StringStream* stream) {
+ stream->Add("[receiver_prototype=%p,holder=%p]%s",
+ *prototypes_.first(), *prototypes_.last(),
+ CanOmitPrototypeChecks() ? " (omitted)" : "");
+}
+
+
void HCallStub::PrintDataTo(StringStream* stream) {
stream->Add("%s ",
CodeStub::MajorName(major_key_, false));
@@ -1660,6 +1950,60 @@
}
+void HPhi::AddInformativeDefinitions() {
+ if (OperandCount() == 2) {
+ // If one of the operands is an OSR block give up (this cannot be an
+ // induction variable).
+ if (OperandAt(0)->block()->is_osr_entry() ||
+ OperandAt(1)->block()->is_osr_entry()) return;
+
+ for (int operand_index = 0; operand_index < 2; operand_index++) {
+ int other_operand_index = (operand_index + 1) % 2;
+
+ static NumericRelation relations[] = {
+ NumericRelation::Ge(),
+ NumericRelation::Le()
+ };
+
+ // Check if this phi is an induction variable. If, e.g., we know that
+ // its first input is greater than the phi itself, then that must be
+ // the back edge, and the phi is always greater than its second input.
+ for (int relation_index = 0; relation_index < 2; relation_index++) {
+ if (OperandAt(operand_index)->IsRelationTrue(relations[relation_index],
+ this)) {
+ HInductionVariableAnnotation::AddToGraph(this,
+ relations[relation_index],
+ other_operand_index);
+ }
+ }
+ }
+ }
+}
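A concrete instance of the phi check above: in `for (i = 0; i < n; i++)` the phi for i has operands [0, i + 1]. Because i + 1 >= i holds, operand 1 must be the back edge, so the phi is annotated as >= its other operand, giving i >= 0. The back-edge selection, reduced to a toy (PhiInput is illustrative only):

    // inputs[k].ge_phi answers: is input k known to be >= the phi itself?
    struct PhiInput { bool ge_phi; };

    // Returns the back-edge operand index, or -1 if neither qualifies.
    int BackEdgeIndex(const PhiInput inputs[2]) {
      if (inputs[0].ge_phi) return 0;  // then phi >= inputs[1]
      if (inputs[1].ge_phi) return 1;  // then phi >= inputs[0]
      return -1;  // not a recognizable induction variable
    }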
+
+
+bool HPhi::IsRelationTrueInternal(NumericRelation relation,
+ HValue* other,
+ int offset,
+ int scale) {
+ if (CheckFlag(kNumericConstraintEvaluationInProgress)) return false;
+
+ SetFlag(kNumericConstraintEvaluationInProgress);
+ bool result = true;
+ for (int i = 0; i < OperandCount(); i++) {
+    // Skip OSR entry blocks.
+ if (OperandAt(i)->block()->is_osr_entry()) continue;
+
+ if (!OperandAt(i)->IsRelationTrue(relation, other, offset, scale)) {
+ result = false;
+ break;
+ }
+ }
+ ClearFlag(kNumericConstraintEvaluationInProgress);
+
+ return result;
+}
+
+
InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
if (phi->block()->loop_information() == NULL) return NULL;
if (phi->OperandCount() != 2) return NULL;
@@ -2411,14 +2755,6 @@
}
-static void PrepareConstant(Handle<Object> object) {
- if (!object->IsJSObject()) return;
- Handle<JSObject> js_object = Handle<JSObject>::cast(object);
- if (!js_object->map()->is_deprecated()) return;
- JSObject::TryMigrateInstance(js_object);
-}
-
-
void HConstant::Initialize(Representation r) {
if (r.IsNone()) {
if (has_smi_value_ && kSmiValueSize == 31) {
@@ -2430,7 +2766,6 @@
} else if (has_external_reference_value_) {
r = Representation::External();
} else {
- PrepareConstant(handle_);
r = Representation::Tagged();
}
}
@@ -2755,6 +3090,16 @@
}
+void HCompareNumericAndBranch::AddInformativeDefinitions() {
+ NumericRelation r = NumericRelation::FromToken(token());
+ if (r.IsNone()) return;
+
+ HNumericConstraint::AddToGraph(left(), r, right(), SuccessorAt(0)->first());
+ HNumericConstraint::AddToGraph(
+ left(), r.Negated(), right(), SuccessorAt(1)->first());
+}
+
+
void HCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
stream->Add(Token::Name(token()));
stream->Add(" ");
@@ -2953,7 +3298,6 @@
HValue* typecheck) {
HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
check_map->map_set_.Add(map, zone);
- check_map->has_migration_target_ = map->is_migration_target();
if (map->CanOmitMapChecks() &&
value->IsConstant() &&
HConstant::cast(value)->InstanceOf(map)) {
@@ -3178,8 +3522,8 @@
if (NeedsWriteBarrier()) {
stream->Add(" (write-barrier)");
}
- if (has_transition()) {
- stream->Add(" (transition map %p)", *transition_map());
+ if (!transition().is_null()) {
+ stream->Add(" (transition map %p)", *transition());
}
}
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index 3fae45b..eac5173 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -72,6 +72,7 @@
V(ArgumentsLength) \
V(ArgumentsObject) \
V(Bitwise) \
+ V(BitNot) \
V(BlockEntry) \
V(BoundsCheck) \
V(BoundsCheckBaseIndexInformation) \
@@ -92,6 +93,7 @@
V(CheckInstanceType) \
V(CheckMaps) \
V(CheckMapValue) \
+ V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampToUint8) \
V(ClassOfTestAndBranch) \
@@ -120,6 +122,7 @@
V(Goto) \
V(HasCachedArrayIndexAndBranch) \
V(HasInstanceTypeAndBranch) \
+ V(InductionVariableAnnotation) \
V(InnerAllocatedObject) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
@@ -148,6 +151,7 @@
V(MathMinMax) \
V(Mod) \
V(Mul) \
+ V(NumericConstraint) \
V(OsrEntry) \
V(OuterContext) \
V(Parameter) \
@@ -538,6 +542,158 @@
};
+class NumericRelation {
+ public:
+ enum Kind { NONE, EQ, GT, GE, LT, LE, NE };
+ static const char* MnemonicFromKind(Kind kind) {
+ switch (kind) {
+ case NONE: return "NONE";
+ case EQ: return "EQ";
+ case GT: return "GT";
+ case GE: return "GE";
+ case LT: return "LT";
+ case LE: return "LE";
+ case NE: return "NE";
+ }
+ UNREACHABLE();
+ return NULL;
+ }
+ const char* Mnemonic() const { return MnemonicFromKind(kind_); }
+
+ static NumericRelation None() { return NumericRelation(NONE); }
+ static NumericRelation Eq() { return NumericRelation(EQ); }
+ static NumericRelation Gt() { return NumericRelation(GT); }
+ static NumericRelation Ge() { return NumericRelation(GE); }
+ static NumericRelation Lt() { return NumericRelation(LT); }
+ static NumericRelation Le() { return NumericRelation(LE); }
+ static NumericRelation Ne() { return NumericRelation(NE); }
+
+ bool IsNone() { return kind_ == NONE; }
+
+ static NumericRelation FromToken(Token::Value token) {
+ switch (token) {
+ case Token::EQ: return Eq();
+ case Token::EQ_STRICT: return Eq();
+ case Token::LT: return Lt();
+ case Token::GT: return Gt();
+ case Token::LTE: return Le();
+ case Token::GTE: return Ge();
+ case Token::NE: return Ne();
+ case Token::NE_STRICT: return Ne();
+ default: return None();
+ }
+ }
+
+ // The semantics of "Reversed" is that if "x rel y" is true then also
+ // "y rel.Reversed() x" is true, and that rel.Reversed().Reversed() == rel.
+ NumericRelation Reversed() {
+ switch (kind_) {
+ case NONE: return None();
+ case EQ: return Eq();
+ case GT: return Lt();
+ case GE: return Le();
+ case LT: return Gt();
+ case LE: return Ge();
+ case NE: return Ne();
+ }
+ UNREACHABLE();
+ return None();
+ }
+
+ // The semantics of "Negated" is that if "x rel y" is true then also
+ // "!(x rel.Negated() y)" is true.
+ NumericRelation Negated() {
+ switch (kind_) {
+ case NONE: return None();
+ case EQ: return Ne();
+ case GT: return Le();
+ case GE: return Lt();
+ case LT: return Ge();
+ case LE: return Gt();
+ case NE: return Eq();
+ }
+ UNREACHABLE();
+ return None();
+ }
+
+ // The semantics of "Implies" is that if "x rel y" is true
+ // then also "x other_relation y" is true.
+ bool Implies(NumericRelation other_relation) {
+ switch (kind_) {
+ case NONE: return false;
+ case EQ: return (other_relation.kind_ == EQ)
+ || (other_relation.kind_ == GE)
+ || (other_relation.kind_ == LE);
+ case GT: return (other_relation.kind_ == GT)
+ || (other_relation.kind_ == GE)
+ || (other_relation.kind_ == NE);
+ case LT: return (other_relation.kind_ == LT)
+ || (other_relation.kind_ == LE)
+ || (other_relation.kind_ == NE);
+ case GE: return (other_relation.kind_ == GE);
+ case LE: return (other_relation.kind_ == LE);
+ case NE: return (other_relation.kind_ == NE);
+ }
+ UNREACHABLE();
+ return false;
+ }
+
+ // The semantics of "IsExtendable" is that if
+ // "rel.IsExtendable(direction)" is true then
+  // "x rel y" implies "(x + direction) rel y".
+ bool IsExtendable(int direction) {
+ switch (kind_) {
+ case NONE: return false;
+ case EQ: return false;
+ case GT: return (direction >= 0);
+ case GE: return (direction >= 0);
+ case LT: return (direction <= 0);
+ case LE: return (direction <= 0);
+ case NE: return false;
+ }
+ UNREACHABLE();
+ return false;
+ }
+
+ // CompoundImplies returns true when
+ // "((x + my_offset) >> my_scale) rel y" implies
+ // "((x + other_offset) >> other_scale) other_relation y".
+ bool CompoundImplies(NumericRelation other_relation,
+ int my_offset,
+ int my_scale,
+ int other_offset = 0,
+ int other_scale = 0) {
+ return Implies(other_relation) && ComponentsImply(
+ my_offset, my_scale, other_offset, other_scale);
+ }
+
+ private:
+ // ComponentsImply returns true when
+ // "((x + my_offset) >> my_scale) rel y" implies
+ // "((x + other_offset) >> other_scale) rel y".
+ bool ComponentsImply(int my_offset,
+ int my_scale,
+ int other_offset,
+ int other_scale) {
+ switch (kind_) {
+ case NONE: break; // Fall through to UNREACHABLE().
+ case EQ:
+ case NE: return my_offset == other_offset && my_scale == other_scale;
+ case GT:
+ case GE: return my_offset <= other_offset && my_scale >= other_scale;
+ case LT:
+ case LE: return my_offset >= other_offset && my_scale <= other_scale;
+ }
+ UNREACHABLE();
+ return false;
+ }
+
+ explicit NumericRelation(Kind kind) : kind_(kind) {}
+
+ Kind kind_;
+};
+
+
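Two of the rules above, restated as a runnable check: Lt().Implies(Le()) holds (any x < y also satisfies x <= y), and ComponentsImply for LT accepts a larger source offset at equal scale, so "(x + 1) < y" entails "x < y". A partial transcription covering just these cases:

    #include <cassert>

    enum Kind { EQ, GT, GE, LT, LE, NE };

    // Partial copy of NumericRelation::Implies (NONE omitted).
    bool Implies(Kind a, Kind b) {
      switch (a) {
        case EQ: return b == EQ || b == GE || b == LE;
        case GT: return b == GT || b == GE || b == NE;
        case LT: return b == LT || b == LE || b == NE;
        default: return a == b;
      }
    }

    // ComponentsImply for LT/LE at equal scales: "(x + my_offset) < y"
    // implies "(x + other_offset) < y" whenever my_offset >= other_offset.
    bool LtComponentsImply(int my_offset, int other_offset) {
      return my_offset >= other_offset;
    }

    int main() {
      assert(Implies(LT, LE));          // x < y   =>  x <= y
      assert(!Implies(LE, LT));         // x <= y  does not give x < y
      assert(LtComponentsImply(1, 0));  // (x + 1) < y  =>  x < y
    }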
class DecompositionResult BASE_EMBEDDED {
public:
DecompositionResult() : base_(NULL), offset_(0), scale_(0) {}
@@ -583,6 +739,46 @@
};
+class RangeEvaluationContext BASE_EMBEDDED {
+ public:
+ RangeEvaluationContext(HValue* value, HValue* upper);
+
+ HValue* lower_bound() { return lower_bound_; }
+ HValue* lower_bound_guarantee() { return lower_bound_guarantee_; }
+ HValue* candidate() { return candidate_; }
+ HValue* upper_bound() { return upper_bound_; }
+ HValue* upper_bound_guarantee() { return upper_bound_guarantee_; }
+ int offset() { return offset_; }
+ int scale() { return scale_; }
+
+ bool is_range_satisfied() {
+ return lower_bound_guarantee() != NULL && upper_bound_guarantee() != NULL;
+ }
+
+ void set_lower_bound_guarantee(HValue* guarantee) {
+ lower_bound_guarantee_ = ConvertGuarantee(guarantee);
+ }
+ void set_upper_bound_guarantee(HValue* guarantee) {
+ upper_bound_guarantee_ = ConvertGuarantee(guarantee);
+ }
+
+  void swap_candidate(DecompositionResult* other_candidate) {
+    other_candidate->SwapValues(&candidate_, &offset_, &scale_);
+ }
+
+ private:
+ HValue* ConvertGuarantee(HValue* guarantee);
+
+ HValue* lower_bound_;
+ HValue* lower_bound_guarantee_;
+ HValue* candidate_;
+ HValue* upper_bound_;
+ HValue* upper_bound_guarantee_;
+ int offset_;
+ int scale_;
+};
+
+
typedef EnumSet<GVNFlag> GVNFlagSet;
@@ -620,6 +816,12 @@
// HGraph::ComputeSafeUint32Operations is responsible for setting this
// flag.
kUint32,
+  // If a phi is involved in the evaluation of a numeric constraint, the
+ // recursion can cause an endless cycle: we use this flag to exit the loop.
+ kNumericConstraintEvaluationInProgress,
+ // This flag is set to true after the SetupInformativeDefinitions() pass
+ // has processed this instruction.
+ kIDefsProcessingDone,
kHasNoObservableSideEffects,
// Indicates the instruction is live during dead code elimination.
kIsLive,
@@ -757,8 +959,8 @@
return RedefinedOperandIndex() != kNoRedefinedOperand;
}
HValue* RedefinedOperand() {
- int index = RedefinedOperandIndex();
- return index == kNoRedefinedOperand ? NULL : OperandAt(index);
+ return IsInformativeDefinition() ? OperandAt(RedefinedOperandIndex())
+ : NULL;
}
// A purely informative definition is an idef that will not emit code and
@@ -769,8 +971,17 @@
// This method must always return the original HValue SSA definition
// (regardless of any iDef of this value).
HValue* ActualValue() {
- int index = RedefinedOperandIndex();
- return index == kNoRedefinedOperand ? this : OperandAt(index);
+ return IsInformativeDefinition() ? RedefinedOperand()->ActualValue()
+ : this;
+ }
+
+ virtual void AddInformativeDefinitions() {}
+
+ void UpdateRedefinedUsesWhileSettingUpInformativeDefinitions() {
+ UpdateRedefinedUsesInner<TestDominanceUsingProcessedFlag>();
+ }
+ void UpdateRedefinedUses() {
+ UpdateRedefinedUsesInner<Dominates>();
}
bool IsInteger32Constant();
@@ -921,6 +1132,12 @@
virtual void Verify() = 0;
#endif
+ bool IsRelationTrue(NumericRelation relation,
+ HValue* other,
+ int offset = 0,
+ int scale = 0);
+
+ bool TryGuaranteeRange(HValue* upper_bound);
virtual bool TryDecompose(DecompositionResult* decomposition) {
if (RedefinedOperand() != NULL) {
return RedefinedOperand()->TryDecompose(decomposition);
@@ -942,6 +1159,17 @@
}
protected:
+ void TryGuaranteeRangeRecursive(RangeEvaluationContext* context);
+
+ enum RangeGuaranteeDirection {
+ DIRECTION_NONE = 0,
+ DIRECTION_UPPER = 1,
+ DIRECTION_LOWER = 2,
+ DIRECTION_BOTH = DIRECTION_UPPER | DIRECTION_LOWER
+ };
+ virtual void SetResponsibilityForRange(RangeGuaranteeDirection direction) {}
+ virtual void TryGuaranteeRangeChanging(RangeEvaluationContext* context) {}
+
// This function must be overridden for instructions with flag kUseGVN, to
// compare the non-Operand parts of the instruction.
virtual bool DataEquals(HValue* other) {
@@ -975,6 +1203,47 @@
representation_ = r;
}
+ // Signature of a function testing if a HValue properly dominates another.
+ typedef bool (*DominanceTest)(HValue*, HValue*);
+
+  // Simple implementation of DominanceTest that walks the chain of
+  // HInstructions (used in UpdateRedefinedUsesInner).
+ static bool Dominates(HValue* dominator, HValue* dominated);
+
+ // A fast implementation of DominanceTest that works only for the
+ // "current" instruction in the SetupInformativeDefinitions() phase.
+ // During that phase we use a flag to mark processed instructions, and by
+ // checking the flag we can quickly test if an instruction comes before or
+ // after the "current" one.
+ static bool TestDominanceUsingProcessedFlag(HValue* dominator,
+ HValue* dominated);
+
+ // If we are redefining an operand, update all its dominated uses (the
+ // function that checks if a use is dominated is the template argument).
+ template<DominanceTest TestDominance>
+ void UpdateRedefinedUsesInner() {
+ HValue* input = RedefinedOperand();
+ if (input != NULL) {
+ for (HUseIterator uses = input->uses(); !uses.Done(); uses.Advance()) {
+ HValue* use = uses.value();
+ if (TestDominance(this, use)) {
+ use->SetOperandAt(uses.index(), this);
+ }
+ }
+ }
+ }
+
+ // Informative definitions can override this method to state any numeric
+ // relation they provide on the redefined value.
+ // Returns true if it is guaranteed that:
+ // ((this + offset) >> scale) relation other
+ virtual bool IsRelationTrueInternal(NumericRelation relation,
+ HValue* other,
+ int offset = 0,
+ int scale = 0) {
+ return false;
+ }
+
static GVNFlagSet AllDependsOnFlagSet() {
GVNFlagSet result;
// Create changes mask.
@@ -1245,6 +1514,52 @@
};
+class HNumericConstraint : public HTemplateInstruction<2> {
+ public:
+ static HNumericConstraint* AddToGraph(HValue* constrained_value,
+ NumericRelation relation,
+ HValue* related_value,
+ HInstruction* insertion_point = NULL);
+
+ HValue* constrained_value() { return OperandAt(0); }
+ HValue* related_value() { return OperandAt(1); }
+ NumericRelation relation() { return relation_; }
+
+ virtual int RedefinedOperandIndex() { return 0; }
+ virtual bool IsPurelyInformativeDefinition() { return true; }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return representation();
+ }
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ virtual bool IsRelationTrueInternal(NumericRelation other_relation,
+ HValue* other_related_value,
+ int offset = 0,
+ int scale = 0) {
+ if (related_value() == other_related_value) {
+ return relation().CompoundImplies(other_relation, offset, scale);
+ } else {
+ return false;
+ }
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(NumericConstraint)
+
+ private:
+ HNumericConstraint(HValue* constrained_value,
+ NumericRelation relation,
+ HValue* related_value)
+ : relation_(relation) {
+ SetOperandAt(0, constrained_value);
+ SetOperandAt(1, related_value);
+ }
+
+ NumericRelation relation_;
+};
+
+
class HDeoptimize: public HTemplateInstruction<0> {
public:
DECLARE_INSTRUCTION_FACTORY_P1(HDeoptimize, Deoptimizer::BailoutType);
@@ -2391,6 +2706,37 @@
};
+class HBitNot: public HUnaryOperation {
+ public:
+ DECLARE_INSTRUCTION_FACTORY_P1(HBitNot, HValue*);
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Integer32();
+ }
+ virtual Representation observed_input_representation(int index) {
+ return Representation::Integer32();
+ }
+
+ virtual HValue* Canonicalize();
+
+ DECLARE_CONCRETE_INSTRUCTION(BitNot)
+
+ protected:
+ virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ explicit HBitNot(HValue* value)
+ : HUnaryOperation(value, HType::TaggedNumber()) {
+ set_representation(Representation::Integer32());
+ SetFlag(kUseGVN);
+ SetFlag(kTruncatingToInt32);
+ SetFlag(kAllowUndefinedAsNaN);
+ }
+
+ virtual bool IsDeletable() const { return true; }
+};
+
+
class HUnaryMathOperation: public HTemplateInstruction<2> {
public:
static HInstruction* New(Zone* zone,
@@ -2532,7 +2878,6 @@
HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
for (int i = 0; i < maps->length(); i++) {
check_map->map_set_.Add(maps->at(i), zone);
- check_map->has_migration_target_ |= maps->at(i)->is_migration_target();
}
check_map->map_set_.Sort();
return check_map;
@@ -2551,10 +2896,6 @@
HValue* value() { return OperandAt(0); }
SmallMapList* map_set() { return &map_set_; }
- bool has_migration_target() {
- return has_migration_target_;
- }
-
virtual void FinalizeUniqueValueId();
DECLARE_CONCRETE_INSTRUCTION(CheckMaps)
@@ -2579,7 +2920,7 @@
// Clients should use one of the static New* methods above.
HCheckMaps(HValue* value, Zone *zone, HValue* typecheck)
: HTemplateInstruction<2>(value->type()),
- omit_(false), has_migration_target_(false), map_unique_ids_(0, zone) {
+ omit_(false), map_unique_ids_(0, zone) {
SetOperandAt(0, value);
// Use the object value for the dependency if NULL is passed.
// TODO(titzer): do GVN flags already express this dependency?
@@ -2601,7 +2942,6 @@
}
bool omit_;
- bool has_migration_target_;
SmallMapList map_set_;
ZoneList<UniqueValueId> map_unique_ids_;
};
@@ -2786,6 +3126,87 @@
};
+class HCheckPrototypeMaps: public HTemplateInstruction<0> {
+ public:
+ static HCheckPrototypeMaps* New(Zone* zone,
+ HValue* context,
+ Handle<JSObject> prototype,
+ Handle<JSObject> holder,
+ CompilationInfo* info) {
+ return new(zone) HCheckPrototypeMaps(prototype, holder, zone, info);
+ }
+
+ ZoneList<Handle<JSObject> >* prototypes() { return &prototypes_; }
+
+ ZoneList<Handle<Map> >* maps() { return &maps_; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps)
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::None();
+ }
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ virtual intptr_t Hashcode() {
+ return first_prototype_unique_id_.Hashcode() * 17 +
+ last_prototype_unique_id_.Hashcode();
+ }
+
+ virtual void FinalizeUniqueValueId() {
+ first_prototype_unique_id_ = UniqueValueId(prototypes_.first());
+ last_prototype_unique_id_ = UniqueValueId(prototypes_.last());
+ }
+
+ bool CanOmitPrototypeChecks() { return can_omit_prototype_maps_; }
+
+ protected:
+ virtual bool DataEquals(HValue* other) {
+ HCheckPrototypeMaps* b = HCheckPrototypeMaps::cast(other);
+ return first_prototype_unique_id_ == b->first_prototype_unique_id_ &&
+ last_prototype_unique_id_ == b->last_prototype_unique_id_;
+ }
+
+ private:
+ HCheckPrototypeMaps(Handle<JSObject> prototype,
+ Handle<JSObject> holder,
+ Zone* zone,
+ CompilationInfo* info)
+ : prototypes_(2, zone),
+ maps_(2, zone),
+ first_prototype_unique_id_(),
+ last_prototype_unique_id_(),
+ can_omit_prototype_maps_(true) {
+ SetFlag(kUseGVN);
+ SetGVNFlag(kDependsOnMaps);
+ // Keep a list of all objects on the prototype chain up to the holder
+ // and the expected maps.
+ while (true) {
+ prototypes_.Add(prototype, zone);
+ Handle<Map> map(prototype->map());
+ maps_.Add(map, zone);
+ can_omit_prototype_maps_ &= map->CanOmitPrototypeChecks();
+ if (prototype.is_identical_to(holder)) break;
+ prototype = Handle<JSObject>(JSObject::cast(prototype->GetPrototype()));
+ }
+ if (can_omit_prototype_maps_) {
+ // Mark in-flight compilation as dependent on those maps.
+ for (int i = 0; i < maps()->length(); i++) {
+ Handle<Map> map = maps()->at(i);
+ map->AddDependentCompilationInfo(DependentCode::kPrototypeCheckGroup,
+ info);
+ }
+ }
+ }
+
+ ZoneList<Handle<JSObject> > prototypes_;
+ ZoneList<Handle<Map> > maps_;
+ UniqueValueId first_prototype_unique_id_;
+ UniqueValueId last_prototype_unique_id_;
+ bool can_omit_prototype_maps_;
+};
+
+
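The HCheckPrototypeMaps constructor above walks the chain from the receiver's prototype up to and including the holder, recording each object and its map. The walk itself, reduced to a generic sketch (Obj stands in for JSObject; like the V8 code, it assumes the holder is on the chain):

    #include <vector>

    struct Obj { const Obj* prototype; };

    // Collect every object from `start` up to and including `holder`.
    std::vector<const Obj*> CollectChain(const Obj* start, const Obj* holder) {
      std::vector<const Obj*> chain;
      for (const Obj* cur = start; ; cur = cur->prototype) {
        chain.push_back(cur);
        if (cur == holder) break;
      }
      return chain;
    }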
class InductionVariableData;
@@ -3059,6 +3480,8 @@
induction_variable_data_ = InductionVariableData::ExaminePhi(this);
}
+ virtual void AddInformativeDefinitions();
+
virtual void PrintTo(StringStream* stream);
#ifdef DEBUG
@@ -3109,6 +3532,11 @@
inputs_[index] = value;
}
+ virtual bool IsRelationTrueInternal(NumericRelation relation,
+ HValue* other,
+ int offset = 0,
+ int scale = 0);
+
private:
ZoneList<HValue*> inputs_;
int merged_index_;
@@ -3123,6 +3551,53 @@
};
+class HInductionVariableAnnotation : public HUnaryOperation {
+ public:
+ static HInductionVariableAnnotation* AddToGraph(HPhi* phi,
+ NumericRelation relation,
+ int operand_index);
+
+ NumericRelation relation() { return relation_; }
+ HValue* induction_base() { return phi_->OperandAt(operand_index_); }
+
+ virtual int RedefinedOperandIndex() { return 0; }
+ virtual bool IsPurelyInformativeDefinition() { return true; }
+ virtual Representation RequiredInputRepresentation(int index) {
+ return representation();
+ }
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ virtual bool IsRelationTrueInternal(NumericRelation other_relation,
+ HValue* other_related_value,
+ int offset = 0,
+ int scale = 0) {
+ if (induction_base() == other_related_value) {
+ return relation().CompoundImplies(other_relation, offset, scale);
+ } else {
+ return false;
+ }
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(InductionVariableAnnotation)
+
+ private:
+ HInductionVariableAnnotation(HPhi* phi,
+ NumericRelation relation,
+ int operand_index)
+ : HUnaryOperation(phi),
+ phi_(phi), relation_(relation), operand_index_(operand_index) {
+ }
+
+ // We need to store the phi both here and in the instruction operand because
+ // the operand can change if a new idef of the phi is added between the phi
+ // and this instruction (inserting an idef updates every use).
+ HPhi* phi_;
+ NumericRelation relation_;
+ int operand_index_;
+};
+
+
class HArgumentsObject: public HTemplateInstruction<0> {
public:
static HArgumentsObject* New(Zone* zone,
@@ -3212,9 +3687,6 @@
}
return false;
}
- if (has_external_reference_value_) {
- return false;
- }
ASSERT(!handle_.is_null());
Heap* heap = isolate()->heap();
@@ -3643,6 +4115,12 @@
HValue* base() { return base_; }
int offset() { return offset_; }
int scale() { return scale_; }
+ bool index_can_increase() {
+ return (responsibility_direction_ & DIRECTION_LOWER) == 0;
+ }
+ bool index_can_decrease() {
+ return (responsibility_direction_ & DIRECTION_UPPER) == 0;
+ }
void ApplyIndexChange();
bool DetectCompoundIndex() {
@@ -3666,6 +4144,11 @@
return representation();
}
+ virtual bool IsRelationTrueInternal(NumericRelation relation,
+ HValue* related_value,
+ int offset = 0,
+ int scale = 0);
+
virtual void PrintDataTo(StringStream* stream);
virtual void InferRepresentation(HInferRepresentationPhase* h_infer);
@@ -3676,17 +4159,25 @@
virtual int RedefinedOperandIndex() { return 0; }
virtual bool IsPurelyInformativeDefinition() { return skip_check(); }
+ virtual void AddInformativeDefinitions();
DECLARE_CONCRETE_INSTRUCTION(BoundsCheck)
protected:
friend class HBoundsCheckBaseIndexInformation;
+ virtual void SetResponsibilityForRange(RangeGuaranteeDirection direction) {
+ responsibility_direction_ = static_cast<RangeGuaranteeDirection>(
+ responsibility_direction_ | direction);
+ }
+
virtual bool DataEquals(HValue* other) { return true; }
+ virtual void TryGuaranteeRangeChanging(RangeEvaluationContext* context);
bool skip_check_;
HValue* base_;
int offset_;
int scale_;
+ RangeGuaranteeDirection responsibility_direction_;
bool allow_equality_;
private:
@@ -3697,6 +4188,7 @@
HBoundsCheck(HValue* index, HValue* length)
: skip_check_(false),
base_(NULL), offset_(0), scale_(0),
+ responsibility_direction_(DIRECTION_NONE),
allow_equality_(false) {
SetOperandAt(0, index);
SetOperandAt(1, length);
@@ -3731,10 +4223,22 @@
return representation();
}
+ virtual bool IsRelationTrueInternal(NumericRelation relation,
+ HValue* related_value,
+ int offset = 0,
+ int scale = 0);
virtual void PrintDataTo(StringStream* stream);
virtual int RedefinedOperandIndex() { return 0; }
virtual bool IsPurelyInformativeDefinition() { return true; }
+
+ protected:
+ virtual void SetResponsibilityForRange(RangeGuaranteeDirection direction) {
+ bounds_check()->SetResponsibilityForRange(direction);
+ }
+ virtual void TryGuaranteeRangeChanging(RangeEvaluationContext* context) {
+ bounds_check()->TryGuaranteeRangeChanging(context);
+ }
};
@@ -3907,6 +4411,8 @@
}
virtual void PrintDataTo(StringStream* stream);
+ virtual void AddInformativeDefinitions();
+
DECLARE_CONCRETE_INSTRUCTION(CompareNumericAndBranch)
private:
@@ -5866,7 +6372,7 @@
};
-class HStoreNamedField: public HTemplateInstruction<3> {
+class HStoreNamedField: public HTemplateInstruction<2> {
public:
DECLARE_INSTRUCTION_FACTORY_P3(HStoreNamedField, HValue*,
HObjectAccess, HValue*);
@@ -5898,35 +6404,24 @@
return write_barrier_mode_ == SKIP_WRITE_BARRIER;
}
- HValue* object() const { return OperandAt(0); }
- HValue* value() const { return OperandAt(1); }
- HValue* transition() const { return OperandAt(2); }
+ HValue* object() { return OperandAt(0); }
+ HValue* value() { return OperandAt(1); }
HObjectAccess access() const { return access_; }
- HValue* new_space_dominator() const { return new_space_dominator_; }
- bool has_transition() const { return has_transition_; }
-
- Handle<Map> transition_map() const {
- if (has_transition()) {
- return Handle<Map>::cast(HConstant::cast(transition())->handle());
- } else {
- return Handle<Map>();
- }
- }
-
- void SetTransition(HConstant* map_constant, CompilationInfo* info) {
- ASSERT(!has_transition()); // Only set once.
- Handle<Map> map = Handle<Map>::cast(map_constant->handle());
+ Handle<Map> transition() const { return transition_; }
+ UniqueValueId transition_unique_id() const { return transition_unique_id_; }
+ void SetTransition(Handle<Map> map, CompilationInfo* info) {
+ ASSERT(transition_.is_null()); // Only set once.
if (map->CanBeDeprecated()) {
map->AddDependentCompilationInfo(DependentCode::kTransitionGroup, info);
}
- SetOperandAt(2, map_constant);
- has_transition_ = true;
+ transition_ = map;
}
+ HValue* new_space_dominator() const { return new_space_dominator_; }
bool NeedsWriteBarrier() {
ASSERT(!(FLAG_track_double_fields && field_representation().IsDouble()) ||
- !has_transition());
+ transition_.is_null());
if (IsSkipWriteBarrier()) return false;
if (field_representation().IsDouble()) return false;
if (field_representation().IsSmi()) return false;
@@ -5941,6 +6436,10 @@
return ReceiverObjectNeedsWriteBarrier(object(), new_space_dominator());
}
+ virtual void FinalizeUniqueValueId() {
+ transition_unique_id_ = UniqueValueId(transition_);
+ }
+
Representation field_representation() const {
return access_.representation();
}
@@ -5950,19 +6449,20 @@
HObjectAccess access,
HValue* val)
: access_(access),
+ transition_(),
+ transition_unique_id_(),
new_space_dominator_(NULL),
- write_barrier_mode_(UPDATE_WRITE_BARRIER),
- has_transition_(false) {
+ write_barrier_mode_(UPDATE_WRITE_BARRIER) {
SetOperandAt(0, obj);
SetOperandAt(1, val);
- SetOperandAt(2, obj);
access.SetGVNFlags(this, true);
}
HObjectAccess access_;
+ Handle<Map> transition_;
+ UniqueValueId transition_unique_id_;
HValue* new_space_dominator_;
- WriteBarrierMode write_barrier_mode_ : 1;
- bool has_transition_ : 1;
+ WriteBarrierMode write_barrier_mode_;
};
diff --git a/src/hydrogen-uint32-analysis.cc b/src/hydrogen-uint32-analysis.cc
index 835a198..67219f5 100644
--- a/src/hydrogen-uint32-analysis.cc
+++ b/src/hydrogen-uint32-analysis.cc
@@ -33,7 +33,11 @@
bool HUint32AnalysisPhase::IsSafeUint32Use(HValue* val, HValue* use) {
// Operations that operate on bits are safe.
- if (use->IsBitwise() || use->IsShl() || use->IsSar() || use->IsShr()) {
+ if (use->IsBitwise() ||
+ use->IsShl() ||
+ use->IsSar() ||
+ use->IsShr() ||
+ use->IsBitNot()) {
return true;
} else if (use->IsChange() || use->IsSimulate()) {
     // Conversions and deoptimization have special support for uint32.
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 837c978..7ec65df 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -957,9 +957,8 @@
// Push the new increment value on the expression stack to merge into the phi.
builder_->environment()->Push(increment_);
- HBasicBlock* last_block = builder_->current_block();
- last_block->GotoNoSimulate(header_block_);
- header_block_->loop_information()->RegisterBackEdge(last_block);
+ builder_->current_block()->GotoNoSimulate(header_block_);
+ header_block_->loop_information()->RegisterBackEdge(body_block_);
builder_->set_current_block(exit_block_);
// Pop the phi from the expression stack
@@ -1050,14 +1049,12 @@
HBasicBlock* continuation) {
if (continuation->last_environment() != NULL) {
// When merging from a deopt block to a continuation, resolve differences in
- // environment by pushing constant 0 and popping extra values so that the
- // environments match during the join. Push 0 since it has the most specific
- // representation, and will not influence representation inference of the
- // phi.
+ // environment by pushing undefined and popping extra values so that the
+ // environments match during the join.
int continuation_env_length = continuation->last_environment()->length();
while (continuation_env_length != from->last_environment()->length()) {
if (continuation_env_length > from->last_environment()->length()) {
- from->last_environment()->Push(graph()->GetConstant0());
+ from->last_environment()->Push(graph()->GetConstantUndefined());
} else {
from->last_environment()->Pop();
}
@@ -1719,6 +1716,38 @@
}
+HInstruction* HGraphBuilder::BuildUnaryMathOp(
+ HValue* input, Handle<Type> type, Token::Value operation) {
+ // We only handle the numeric cases here
+ type = handle(
+ Type::Intersect(type, handle(Type::Number(), isolate())), isolate());
+
+ switch (operation) {
+ default:
+ UNREACHABLE();
+ case Token::SUB: {
+ HInstruction* instr =
+ NewUncasted<HMul>(input, graph()->GetConstantMinus1());
+ Representation rep = Representation::FromType(type);
+ if (type->Is(Type::None())) {
+ Add<HDeoptimize>(Deoptimizer::SOFT);
+ }
+ if (instr->IsBinaryOperation()) {
+ HBinaryOperation* binop = HBinaryOperation::cast(instr);
+ binop->set_observed_input_representation(1, rep);
+ binop->set_observed_input_representation(2, rep);
+ }
+ return instr;
+ }
+ case Token::BIT_NOT:
+ if (type->Is(Type::None())) {
+ Add<HDeoptimize>(Deoptimizer::SOFT);
+ }
+ return New<HBitNot>(input);
+ }
+}
+
+
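BuildUnaryMathOp above lowers unary minus to a multiplication by the constant -1, reusing the ordinary HMul machinery (observed input representations, soft deopt for untyped input) instead of a dedicated negate instruction; only BIT_NOT gets its own HBitNot. The identity it relies on, stated directly:

    #include <cassert>

    // Unary minus as multiply-by-minus-one, valid for int32 and double.
    double NegateViaMul(double x) { return x * -1.0; }

    int main() {
      assert(NegateViaMul(2.5) == -2.5);
      assert(NegateViaMul(-7.0) == 7.0);
    }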
void HGraphBuilder::BuildCompareNil(
HValue* value,
Handle<Type> type,
@@ -2549,7 +2578,7 @@
// The value is tracked in the bailout environment, and communicated
// through the environment as the result of the expression.
if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
- owner()->Bailout(kBadValueContextForArgumentsValue);
+ owner()->Bailout("bad value context for arguments value");
}
owner()->Push(value);
}
@@ -2601,7 +2630,7 @@
void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
ASSERT(!instr->IsControlInstruction());
if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
- return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
+ return owner()->Bailout("bad value context for arguments object value");
}
owner()->AddInstruction(instr);
owner()->Push(instr);
@@ -2614,7 +2643,7 @@
void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
ASSERT(!instr->HasObservableSideEffects());
if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
- return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
+ return owner()->Bailout("bad value context for arguments object value");
}
HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
@@ -2704,7 +2733,7 @@
// branch.
HOptimizedGraphBuilder* builder = owner();
if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
- builder->Bailout(kArgumentsObjectValueInATestContext);
+ builder->Bailout("arguments object value in a test context");
}
if (value->IsConstant()) {
HConstant* constant_value = HConstant::cast(value);
@@ -2750,7 +2779,7 @@
} while (false)
-void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
+void HOptimizedGraphBuilder::Bailout(const char* reason) {
current_info()->set_bailout_reason(reason);
SetStackOverflow();
}
@@ -2809,16 +2838,16 @@
bool HOptimizedGraphBuilder::BuildGraph() {
if (current_info()->function()->is_generator()) {
- Bailout(kFunctionIsAGenerator);
+ Bailout("function is a generator");
return false;
}
Scope* scope = current_info()->scope();
if (scope->HasIllegalRedeclaration()) {
- Bailout(kFunctionWithIllegalRedeclaration);
+ Bailout("function with illegal redeclaration");
return false;
}
if (scope->calls_eval()) {
- Bailout(kFunctionCallsEval);
+ Bailout("function calls eval");
return false;
}
SetUpScope(scope);
@@ -2884,7 +2913,8 @@
}
-bool HGraph::Optimize(BailoutReason* bailout_reason) {
+bool HGraph::Optimize(SmartArrayPointer<char>* bailout_reason) {
+ *bailout_reason = SmartArrayPointer<char>();
OrderBlocks();
AssignDominators();
@@ -2905,12 +2935,14 @@
Run<HPropagateDeoptimizingMarkPhase>();
if (!CheckConstPhiUses()) {
- *bailout_reason = kUnsupportedPhiUseOfConstVariable;
+ *bailout_reason = SmartArrayPointer<char>(StrDup(
+ "Unsupported phi use of const variable"));
return false;
}
Run<HRedundantPhiEliminationPhase>();
if (!CheckArgumentsPhiUses()) {
- *bailout_reason = kUnsupportedPhiUseOfArguments;
+ *bailout_reason = SmartArrayPointer<char>(StrDup(
+ "Unsupported phi use of arguments"));
return false;
}
@@ -2950,10 +2982,11 @@
// Eliminate redundant stack checks on backwards branches.
Run<HStackCheckEliminationPhase>();
- if (FLAG_array_bounds_checks_elimination) {
+ if (FLAG_idefs) SetupInformativeDefinitions();
+ if (FLAG_array_bounds_checks_elimination && !FLAG_idefs) {
Run<HBoundsCheckEliminationPhase>();
}
- if (FLAG_array_bounds_checks_hoisting) {
+ if (FLAG_array_bounds_checks_hoisting && !FLAG_idefs) {
Run<HBoundsCheckHoistingPhase>();
}
if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
@@ -2965,6 +2998,50 @@
}
+void HGraph::SetupInformativeDefinitionsInBlock(HBasicBlock* block) {
+ for (int phi_index = 0; phi_index < block->phis()->length(); phi_index++) {
+ HPhi* phi = block->phis()->at(phi_index);
+ phi->AddInformativeDefinitions();
+ phi->SetFlag(HValue::kIDefsProcessingDone);
+ // We do not support phis that "redefine just one operand".
+ ASSERT(!phi->IsInformativeDefinition());
+ }
+
+ for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
+ HInstruction* i = it.Current();
+ i->AddInformativeDefinitions();
+ i->SetFlag(HValue::kIDefsProcessingDone);
+ i->UpdateRedefinedUsesWhileSettingUpInformativeDefinitions();
+ }
+}
+
+
+// This method is recursive, so if its stack frame is large it could
+// cause a stack overflow.
+// To keep the individual stack frames small, we do the actual work inside
+// SetupInformativeDefinitionsInBlock().
+void HGraph::SetupInformativeDefinitionsRecursively(HBasicBlock* block) {
+ SetupInformativeDefinitionsInBlock(block);
+ for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
+ SetupInformativeDefinitionsRecursively(block->dominated_blocks()->at(i));
+ }
+
+ for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
+ HInstruction* i = it.Current();
+ if (i->IsBoundsCheck()) {
+ HBoundsCheck* check = HBoundsCheck::cast(i);
+ check->ApplyIndexChange();
+ }
+ }
+}
+
+
+void HGraph::SetupInformativeDefinitions() {
+ HPhase phase("H_Setup informative definitions", this);
+ SetupInformativeDefinitionsRecursively(entry_block());
+}
+
+
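SetupInformativeDefinitionsRecursively visits blocks in dominator-tree pre-order, so each block's definitions are in place before any dominated block is processed, and the bounds-check index changes are applied on the way back up. The traversal shape as a generic sketch (Block is a stand-in):

    #include <cstddef>
    #include <vector>

    struct Block {
      std::vector<Block*> dominated;
    };

    template <typename Pre, typename Post>
    void WalkDominatorTree(Block* block, Pre pre, Post post) {
      pre(block);  // e.g. SetupInformativeDefinitionsInBlock
      for (std::size_t i = 0; i < block->dominated.size(); ++i) {
        WalkDominatorTree(block->dominated[i], pre, post);
      }
      post(block);  // e.g. ApplyIndexChange on each bounds check
    }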
void HGraph::RestoreActualValues() {
HPhase phase("H_Restore actual values", this);
@@ -3047,7 +3124,7 @@
// not have declarations).
if (scope->arguments() != NULL) {
if (!scope->arguments()->IsStackAllocated()) {
- return Bailout(kContextAllocatedArguments);
+ return Bailout("context-allocated arguments");
}
environment()->Bind(scope->arguments(),
@@ -3068,7 +3145,7 @@
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
if (stmt->scope() != NULL) {
- return Bailout(kScopedBlock);
+ return Bailout("ScopedBlock");
}
BreakAndContinueInfo break_info(stmt);
{ BreakAndContinueScope push(&break_info, this);
@@ -3280,7 +3357,7 @@
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout(kWithStatement);
+ return Bailout("WithStatement");
}
@@ -3295,12 +3372,12 @@
ZoneList<CaseClause*>* clauses = stmt->cases();
int clause_count = clauses->length();
if (clause_count > kCaseClauseLimit) {
- return Bailout(kSwitchStatementTooManyClauses);
+ return Bailout("SwitchStatement: too many clauses");
}
ASSERT(stmt->switch_type() != SwitchStatement::UNKNOWN_SWITCH);
if (stmt->switch_type() == SwitchStatement::GENERIC_SWITCH) {
- return Bailout(kSwitchStatementMixedOrNonLiteralSwitchLabels);
+ return Bailout("SwitchStatement: mixed or non-literal switch labels");
}
HValue* context = environment()->context();
@@ -3592,16 +3669,16 @@
ASSERT(current_block()->HasPredecessor());
if (!FLAG_optimize_for_in) {
- return Bailout(kForInStatementOptimizationIsDisabled);
+ return Bailout("ForInStatement optimization is disabled");
}
if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
- return Bailout(kForInStatementIsNotFastCase);
+ return Bailout("ForInStatement is not fast case");
}
if (!stmt->each()->IsVariableProxy() ||
!stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
- return Bailout(kForInStatementWithNonLocalEachVariable);
+ return Bailout("ForInStatement with non-local each variable");
}
Variable* each_var = stmt->each()->AsVariableProxy()->var();
@@ -3695,7 +3772,7 @@
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout(kForOfStatement);
+ return Bailout("ForOfStatement");
}
@@ -3703,7 +3780,7 @@
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout(kTryCatchStatement);
+ return Bailout("TryCatchStatement");
}
@@ -3712,7 +3789,7 @@
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout(kTryFinallyStatement);
+ return Bailout("TryFinallyStatement");
}
@@ -3720,7 +3797,7 @@
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout(kDebuggerStatement);
+ return Bailout("DebuggerStatement");
}
@@ -3766,7 +3843,7 @@
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout(kSharedFunctionInfoLiteral);
+ return Bailout("SharedFunctionInfoLiteral");
}
@@ -3846,7 +3923,7 @@
case Variable::UNALLOCATED: {
if (IsLexicalVariableMode(variable->mode())) {
// TODO(rossberg): should this be an ASSERT?
- return Bailout(kReferenceToGlobalLexicalVariable);
+ return Bailout("reference to global lexical variable");
}
// Handle known global constants like 'undefined' specially to avoid a
// load from a global cell for them.
@@ -3903,7 +3980,7 @@
if (value == graph()->GetConstantHole()) {
ASSERT(IsDeclaredVariableMode(variable->mode()) &&
variable->mode() != VAR);
- return Bailout(kReferenceToUninitializedVariable);
+ return Bailout("reference to uninitialized variable");
}
return ast_context()->ReturnValue(value);
}
@@ -3915,7 +3992,7 @@
}
case Variable::LOOKUP:
- return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
+ return Bailout("reference to a variable which requires dynamic lookup");
}
}
@@ -4036,7 +4113,8 @@
int* data_size,
int* pointer_size) {
if (boilerplate->map()->is_deprecated()) {
- Handle<Object> result = JSObject::TryMigrateInstance(boilerplate);
+ Handle<Object> result =
+ JSObject::TryMigrateInstance(boilerplate);
if (result->IsSmi()) return false;
}
@@ -4213,7 +4291,7 @@
case ObjectLiteral::Property::PROTOTYPE:
case ObjectLiteral::Property::SETTER:
case ObjectLiteral::Property::GETTER:
- return Bailout(kObjectLiteralWithComplexProperty);
+ return Bailout("Object literal with complex property");
default: UNREACHABLE();
}
}
@@ -4252,7 +4330,7 @@
raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
isolate(), literals, expr->constant_elements());
if (raw_boilerplate.is_null()) {
- return Bailout(kArrayBoilerplateCreationFailed);
+ return Bailout("array boilerplate creation failed");
}
site = isolate()->factory()->NewAllocationSite();
@@ -4343,7 +4421,7 @@
CHECK_ALIVE(VisitForValue(subexpr));
HValue* value = Pop();
- if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);
+ if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal");
elements = AddLoadElements(literal);
@@ -4423,7 +4501,7 @@
if (proto_result.IsProperty()) {
// If the inherited property could induce readonly-ness, bail out.
if (proto_result.IsReadOnly() || !proto_result.IsCacheable()) {
- Bailout(kImproperObjectOnPrototypeChainForStore);
+ Bailout("improper object on prototype chain for store");
return NULL;
}
// We only need to check up to the preexisting property.
@@ -4436,9 +4514,9 @@
ASSERT(proto->GetPrototype(isolate())->IsNull());
}
ASSERT(proto->IsJSObject());
- BuildCheckPrototypeMaps(
+ Add<HCheckPrototypeMaps>(
Handle<JSObject>(JSObject::cast(map->prototype())),
- Handle<JSObject>(JSObject::cast(proto)));
+ Handle<JSObject>(JSObject::cast(proto)), top_info());
}
HObjectAccess field_access = HObjectAccess::ForField(map, lookup, name);
@@ -4476,8 +4554,7 @@
if (transition_to_field) {
Handle<Map> transition(lookup->GetTransitionMapFromMap(*map));
- HConstant* transition_constant = Add<HConstant>(transition);
- instr->SetTransition(transition_constant, top_info());
+ instr->SetTransition(transition, top_info());
// TODO(fschneider): Record the new map type of the object in the IR to
// enable elimination of redundant checks after the transition store.
instr->SetGVNFlag(kChangesMaps);
@@ -4593,7 +4670,8 @@
Handle<JSObject> holder(lookup.holder());
Handle<Map> holder_map(holder->map());
- BuildCheckPrototypeMaps(Handle<JSObject>::cast(prototype), holder);
+ Add<HCheckPrototypeMaps>(
+ Handle<JSObject>::cast(prototype), holder, top_info());
HValue* holder_value = Add<HConstant>(holder);
return BuildLoadNamedField(holder_value,
HObjectAccess::ForField(holder_map, &lookup, name));
@@ -4927,7 +5005,7 @@
if (proxy != NULL) {
Variable* var = proxy->var();
if (var->mode() == LET) {
- return Bailout(kUnsupportedLetCompoundAssignment);
+ return Bailout("unsupported let compound assignment");
}
CHECK_ALIVE(VisitForValue(operation));
@@ -4943,7 +5021,7 @@
case Variable::PARAMETER:
case Variable::LOCAL:
if (var->mode() == CONST) {
- return Bailout(kUnsupportedConstCompoundAssignment);
+ return Bailout("unsupported const compound assignment");
}
BindIfLive(var, Top());
break;
@@ -4959,7 +5037,8 @@
int count = current_info()->scope()->num_parameters();
for (int i = 0; i < count; ++i) {
if (var == current_info()->scope()->parameter(i)) {
- Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
+ Bailout(
+ "assignment to parameter, function uses arguments object");
}
}
}
@@ -4990,7 +5069,7 @@
}
case Variable::LOOKUP:
- return Bailout(kCompoundAssignmentToLookupSlot);
+ return Bailout("compound assignment to lookup slot");
}
return ast_context()->ReturnValue(Pop());
@@ -5079,7 +5158,7 @@
}
} else {
- return Bailout(kInvalidLhsInCompoundAssignment);
+ return Bailout("invalid lhs in compound assignment");
}
}
@@ -5116,11 +5195,11 @@
}
} else if (var->mode() == CONST_HARMONY) {
if (expr->op() != Token::INIT_CONST_HARMONY) {
- return Bailout(kNonInitializerAssignmentToConst);
+ return Bailout("non-initializer assignment to const");
}
}
- if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);
+ if (proxy->IsArguments()) return Bailout("assignment to arguments");
// Handle the assignment.
switch (var->location()) {
@@ -5139,7 +5218,7 @@
if (var->mode() == LET && expr->op() == Token::ASSIGN) {
HValue* env_value = environment()->Lookup(var);
if (env_value == graph()->GetConstantHole()) {
- return Bailout(kAssignmentToLetVariableBeforeInitialization);
+ return Bailout("assignment to let variable before initialization");
}
}
// We do not allow the arguments object to occur in a context where it
@@ -5161,7 +5240,7 @@
int count = current_info()->scope()->num_parameters();
for (int i = 0; i < count; ++i) {
if (var == current_info()->scope()->parameter(i)) {
- return Bailout(kAssignmentToParameterInArgumentsObject);
+ return Bailout("assignment to parameter in arguments object");
}
}
}
@@ -5202,10 +5281,10 @@
}
case Variable::LOOKUP:
- return Bailout(kAssignmentToLOOKUPVariable);
+ return Bailout("assignment to LOOKUP variable");
}
} else {
- return Bailout(kInvalidLeftHandSideInAssignment);
+ return Bailout("invalid left-hand side in assignment");
}
}
@@ -5327,7 +5406,7 @@
Handle<JSObject> holder(lookup.holder());
Handle<Map> holder_map(holder->map());
AddCheckMap(object, map);
- BuildCheckPrototypeMaps(prototype, holder);
+ Add<HCheckPrototypeMaps>(prototype, holder, top_info());
HValue* holder_value = Add<HConstant>(holder);
return BuildLoadNamedField(holder_value,
HObjectAccess::ForField(holder_map, &lookup, name));
@@ -5339,7 +5418,7 @@
Handle<JSObject> holder(lookup.holder());
Handle<Map> holder_map(holder->map());
AddCheckMap(object, map);
- BuildCheckPrototypeMaps(prototype, holder);
+ Add<HCheckPrototypeMaps>(prototype, holder, top_info());
Handle<Object> constant(lookup.GetConstantFromMap(*holder_map), isolate());
return New<HConstant>(constant);
}
@@ -5375,7 +5454,7 @@
isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
- BuildCheckPrototypeMaps(prototype, object_prototype);
+ Add<HCheckPrototypeMaps>(prototype, object_prototype, top_info());
load_mode = ALLOW_RETURN_HOLE;
graph()->MarkDependsOnEmptyArrayProtoElements();
}
@@ -5821,38 +5900,11 @@
}
-void HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant,
- CompilationInfo* info) {
- HConstant* constant_value = New<HConstant>(constant);
-
- if (constant->map()->CanOmitMapChecks()) {
- constant->map()->AddDependentCompilationInfo(
- DependentCode::kPrototypeCheckGroup, info);
- return;
- }
-
- AddInstruction(constant_value);
- HCheckMaps* check =
- Add<HCheckMaps>(constant_value, handle(constant->map()), info);
- check->ClearGVNFlag(kDependsOnElementsKind);
-}
-
-
-void HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
- Handle<JSObject> holder) {
- BuildConstantMapCheck(prototype, top_info());
- while (!prototype.is_identical_to(holder)) {
- prototype = handle(JSObject::cast(prototype->GetPrototype()));
- BuildConstantMapCheck(prototype, top_info());
- }
-}
-
-
void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
Handle<Map> receiver_map) {
if (!holder.is_null()) {
Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
- BuildCheckPrototypeMaps(prototype, holder);
+ Add<HCheckPrototypeMaps>(prototype, holder, top_info());
}
}
@@ -6224,7 +6276,7 @@
if (target_info.isolate()->has_pending_exception()) {
// Parse or scope error, never optimize this function.
SetStackOverflow();
- target_shared->DisableOptimization(kParseScopeError);
+ target_shared->DisableOptimization("parse/scope error");
}
TraceInline(target, caller, "parse failure");
return false;
@@ -6363,7 +6415,7 @@
// Bail out if the inline function did, as we cannot residualize a call
// instead.
TraceInline(target, caller, "inline graph construction failed");
- target_shared->DisableOptimization(kInliningBailedOut);
+ target_shared->DisableOptimization("inlining bailed out");
inline_bailout_ = true;
delete target_state;
return true;
@@ -6593,9 +6645,9 @@
HValue* string = Pop();
HValue* context = environment()->context();
ASSERT(!expr->holder().is_null());
- BuildCheckPrototypeMaps(Call::GetPrototypeForPrimitiveCheck(
+ Add<HCheckPrototypeMaps>(Call::GetPrototypeForPrimitiveCheck(
STRING_CHECK, expr->holder()->GetIsolate()),
- expr->holder());
+ expr->holder(), top_info());
HInstruction* char_code =
BuildStringCharCodeAt(string, index);
if (id == kStringCharCodeAt) {
@@ -6908,7 +6960,7 @@
} else {
VariableProxy* proxy = expr->expression()->AsVariableProxy();
if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
- return Bailout(kPossibleDirectCallToEval);
+ return Bailout("possible direct call to eval");
}
bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
@@ -7176,7 +7228,7 @@
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
if (expr->is_jsruntime()) {
- return Bailout(kCallToAJavaScriptRuntimeFunction);
+ return Bailout("call to a JavaScript runtime function");
}
const Runtime::Function* function = expr->function();
@@ -7216,6 +7268,8 @@
case Token::DELETE: return VisitDelete(expr);
case Token::VOID: return VisitVoid(expr);
case Token::TYPEOF: return VisitTypeof(expr);
+ case Token::SUB: return VisitSub(expr);
+ case Token::BIT_NOT: return VisitBitNot(expr);
case Token::NOT: return VisitNot(expr);
default: UNREACHABLE();
}
@@ -7241,7 +7295,7 @@
} else if (proxy != NULL) {
Variable* var = proxy->var();
if (var->IsUnallocated()) {
- Bailout(kDeleteWithGlobalVariable);
+ Bailout("delete with global variable");
} else if (var->IsStackAllocated() || var->IsContextSlot()) {
// Result of deleting non-global variables is false. 'this' is not
// really a variable, though we implement it as one. The
@@ -7251,7 +7305,7 @@
: graph()->GetConstantFalse();
return ast_context()->ReturnValue(value);
} else {
- Bailout(kDeleteWithNonGlobalVariable);
+ Bailout("delete with non-global variable");
}
} else {
// Result of deleting non-property, non-variable reference is true.
@@ -7277,6 +7331,24 @@
}
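+// Both unary minus and bitwise NOT first truncate the operand to a number
+// based on its inferred type bounds, then build the unary math operation.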
+void HOptimizedGraphBuilder::VisitSub(UnaryOperation* expr) {
+ CHECK_ALIVE(VisitForValue(expr->expression()));
+ Handle<Type> operand_type = expr->expression()->bounds().lower;
+ HValue* value = TruncateToNumber(Pop(), &operand_type);
+ HInstruction* instr = BuildUnaryMathOp(value, operand_type, Token::SUB);
+ return ast_context()->ReturnInstruction(instr, expr->id());
+}
+
+
+void HOptimizedGraphBuilder::VisitBitNot(UnaryOperation* expr) {
+ CHECK_ALIVE(VisitForValue(expr->expression()));
+ Handle<Type> operand_type = expr->expression()->bounds().lower;
+ HValue* value = TruncateToNumber(Pop(), &operand_type);
+ HInstruction* instr = BuildUnaryMathOp(value, operand_type, Token::BIT_NOT);
+ return ast_context()->ReturnInstruction(instr, expr->id());
+}
+
+
void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
if (ast_context()->IsTest()) {
TestContext* context = TestContext::cast(ast_context());
@@ -7365,7 +7437,7 @@
VariableProxy* proxy = target->AsVariableProxy();
Property* prop = target->AsProperty();
if (proxy == NULL && prop == NULL) {
- return Bailout(kInvalidLhsInCountOperation);
+ return Bailout("invalid lhs in count operation");
}
// Match the full code generator stack by simulating an extra stack
@@ -7379,7 +7451,7 @@
if (proxy != NULL) {
Variable* var = proxy->var();
if (var->mode() == CONST) {
- return Bailout(kUnsupportedCountOperationWithConst);
+ return Bailout("unsupported count operation with const");
}
// Argument of the count operation is a variable, not a property.
ASSERT(prop == NULL);
@@ -7413,7 +7485,7 @@
int count = current_info()->scope()->num_parameters();
for (int i = 0; i < count; ++i) {
if (var == current_info()->scope()->parameter(i)) {
- return Bailout(kAssignmentToParameterInArgumentsObject);
+ return Bailout("assignment to parameter in arguments object");
}
}
}
@@ -7430,7 +7502,7 @@
}
case Variable::LOOKUP:
- return Bailout(kLookupVariableInCountOperation);
+ return Bailout("lookup variable in count operation");
}
} else {
@@ -8019,7 +8091,7 @@
}
}
default:
- return Bailout(kUnsupportedNonPrimitiveCompare);
+ return Bailout("Unsupported non-primitive compare");
}
} else if (combined_type->Is(Type::InternalizedString()) &&
Token::IsEqualityOp(op)) {
@@ -8486,7 +8558,7 @@
}
break;
case Variable::LOOKUP:
- return Bailout(kUnsupportedLookupSlotInDeclaration);
+ return Bailout("unsupported lookup slot in declaration");
}
}
@@ -8524,7 +8596,7 @@
break;
}
case Variable::LOOKUP:
- return Bailout(kUnsupportedLookupSlotInDeclaration);
+ return Bailout("unsupported lookup slot in declaration");
}
}
@@ -8645,7 +8717,7 @@
void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
- return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
+ return Bailout("inlined runtime function: IsNonNegativeSmi");
}
@@ -8661,7 +8733,8 @@
void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
CallRuntime* call) {
- return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
+ return Bailout(
+ "inlined runtime function: IsStringWrapperSafeForDefaultValueOf");
}
@@ -8715,7 +8788,7 @@
void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
// The special form detected by IsClassOfTest is handled before we get here
// and does not cause a bailout.
- return Bailout(kInlinedRuntimeFunctionClassOf);
+ return Bailout("inlined runtime function: ClassOf");
}
@@ -8932,7 +9005,7 @@
// Support for fast native caches.
void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
- return Bailout(kInlinedRuntimeFunctionGetFromCache);
+ return Bailout("inlined runtime function: GetFromCache");
}
@@ -9062,7 +9135,7 @@
// Check whether two RegExps are equivalent
void HOptimizedGraphBuilder::GenerateIsRegExpEquivalent(CallRuntime* call) {
- return Bailout(kInlinedRuntimeFunctionIsRegExpEquivalent);
+ return Bailout("inlined runtime function: IsRegExpEquivalent");
}
@@ -9076,18 +9149,18 @@
void HOptimizedGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
- return Bailout(kInlinedRuntimeFunctionFastAsciiArrayJoin);
+ return Bailout("inlined runtime function: FastAsciiArrayJoin");
}
// Support for generators.
void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
- return Bailout(kInlinedRuntimeFunctionGeneratorNext);
+ return Bailout("inlined runtime function: GeneratorNext");
}
void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
- return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
+ return Bailout("inlined runtime function: GeneratorThrow");
}
diff --git a/src/hydrogen.h b/src/hydrogen.h
index 6312a52..2668d19 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -367,7 +367,7 @@
return NULL;
}
- bool Optimize(BailoutReason* bailout_reason);
+ bool Optimize(SmartArrayPointer<char>* bailout_reason);
#ifdef DEBUG
void Verify(bool do_full_verify) const;
@@ -1550,6 +1550,9 @@
ElementsKind kind,
int length);
+ HInstruction* BuildUnaryMathOp(
+ HValue* value, Handle<Type> type, Token::Value token);
+
void BuildCompareNil(
HValue* value,
Handle<Type> type,
@@ -1560,10 +1563,6 @@
int previous_object_size,
HValue* payload);
- void BuildConstantMapCheck(Handle<JSObject> constant, CompilationInfo* info);
- void BuildCheckPrototypeMaps(Handle<JSObject> prototype,
- Handle<JSObject> holder);
-
HInstruction* BuildGetNativeContext();
HInstruction* BuildGetArrayFunction();
@@ -1727,7 +1726,7 @@
HValue* context() { return environment()->context(); }
- void Bailout(BailoutReason reason);
+ void Bailout(const char* reason);
HBasicBlock* CreateJoin(HBasicBlock* first,
HBasicBlock* second,
@@ -1808,6 +1807,8 @@
void VisitDelete(UnaryOperation* expr);
void VisitVoid(UnaryOperation* expr);
void VisitTypeof(UnaryOperation* expr);
+ void VisitSub(UnaryOperation* expr);
+ void VisitBitNot(UnaryOperation* expr);
void VisitNot(UnaryOperation* expr);
void VisitComma(BinaryOperation* expr);
diff --git a/src/i18n.cc b/src/i18n.cc
deleted file mode 100644
index b2ccfd4..0000000
--- a/src/i18n.cc
+++ /dev/null
@@ -1,297 +0,0 @@
-// Copyright 2013 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// limitations under the License.
-
-#include "i18n.h"
-
-#include "unicode/calendar.h"
-#include "unicode/dtfmtsym.h"
-#include "unicode/dtptngen.h"
-#include "unicode/locid.h"
-#include "unicode/numsys.h"
-#include "unicode/smpdtfmt.h"
-#include "unicode/timezone.h"
-
-namespace v8 {
-namespace internal {
-
-namespace {
-
-icu::SimpleDateFormat* CreateICUDateFormat(
- Isolate* isolate,
- const icu::Locale& icu_locale,
- Handle<Object> options) {
- // Create time zone as specified by the user. We have to re-create time zone
- // since calendar takes ownership.
- icu::TimeZone* tz = NULL;
- MaybeObject* maybe_object = options->GetProperty(
- *isolate->factory()->NewStringFromAscii(CStrVector("timeZone")));
- Object* timezone;
- if (maybe_object->ToObject(&timezone) && timezone->IsString()) {
- v8::String::Utf8Value utf8_timezone(
- v8::Utils::ToLocal(Handle<String>(String::cast(timezone))));
- icu::UnicodeString u_timezone(icu::UnicodeString::fromUTF8(*utf8_timezone));
- tz = icu::TimeZone::createTimeZone(u_timezone);
- } else {
- tz = icu::TimeZone::createDefault();
- }
-
- // Create a calendar using locale, and apply time zone to it.
- UErrorCode status = U_ZERO_ERROR;
- icu::Calendar* calendar =
- icu::Calendar::createInstance(tz, icu_locale, status);
-
- // Make formatter from skeleton. Calendar and numbering system are added
- // to the locale as Unicode extension (if they were specified at all).
- icu::SimpleDateFormat* date_format = NULL;
- Object* skeleton;
- maybe_object = options->GetProperty(
- *isolate->factory()->NewStringFromAscii(CStrVector("skeleton")));
- if (maybe_object->ToObject(&skeleton) && skeleton->IsString()) {
- v8::String::Utf8Value utf8_skeleton(
- v8::Utils::ToLocal(Handle<String>(String::cast(skeleton))));
- icu::UnicodeString u_skeleton(icu::UnicodeString::fromUTF8(*utf8_skeleton));
- icu::DateTimePatternGenerator* generator =
- icu::DateTimePatternGenerator::createInstance(icu_locale, status);
- icu::UnicodeString pattern;
- if (U_SUCCESS(status)) {
- pattern = generator->getBestPattern(u_skeleton, status);
- delete generator;
- }
-
- date_format = new icu::SimpleDateFormat(pattern, icu_locale, status);
- if (U_SUCCESS(status)) {
- date_format->adoptCalendar(calendar);
- }
- }
-
- if (U_FAILURE(status)) {
- delete calendar;
- delete date_format;
- date_format = NULL;
- }
-
- return date_format;
-}
-
-
-void SetResolvedSettings(Isolate* isolate,
- const icu::Locale& icu_locale,
- icu::SimpleDateFormat* date_format,
- Handle<JSObject> resolved) {
- UErrorCode status = U_ZERO_ERROR;
- icu::UnicodeString pattern;
- date_format->toPattern(pattern);
- JSObject::SetProperty(
- resolved,
- isolate->factory()->NewStringFromAscii(CStrVector("pattern")),
- isolate->factory()->NewStringFromTwoByte(
- Vector<const uint16_t>(
- reinterpret_cast<const uint16_t*>(pattern.getBuffer()),
- pattern.length())),
- NONE,
- kNonStrictMode);
-
- // Set time zone and calendar.
- const icu::Calendar* calendar = date_format->getCalendar();
- const char* calendar_name = calendar->getType();
- JSObject::SetProperty(
- resolved,
- isolate->factory()->NewStringFromAscii(CStrVector("calendar")),
- isolate->factory()->NewStringFromAscii(CStrVector(calendar_name)),
- NONE,
- kNonStrictMode);
-
- const icu::TimeZone& tz = calendar->getTimeZone();
- icu::UnicodeString time_zone;
- tz.getID(time_zone);
-
- icu::UnicodeString canonical_time_zone;
- icu::TimeZone::getCanonicalID(time_zone, canonical_time_zone, status);
- if (U_SUCCESS(status)) {
- if (canonical_time_zone == UNICODE_STRING_SIMPLE("Etc/GMT")) {
- JSObject::SetProperty(
- resolved,
- isolate->factory()->NewStringFromAscii(CStrVector("timeZone")),
- isolate->factory()->NewStringFromAscii(CStrVector("UTC")),
- NONE,
- kNonStrictMode);
- } else {
- JSObject::SetProperty(
- resolved,
- isolate->factory()->NewStringFromAscii(CStrVector("timeZone")),
- isolate->factory()->NewStringFromTwoByte(
- Vector<const uint16_t>(
- reinterpret_cast<const uint16_t*>(
- canonical_time_zone.getBuffer()),
- canonical_time_zone.length())),
- NONE,
- kNonStrictMode);
- }
- }
-
- // Ugly hack. ICU doesn't expose numbering system in any way, so we have
- // to assume that for given locale NumberingSystem constructor produces the
- // same digits as NumberFormat/Calendar would.
- status = U_ZERO_ERROR;
- icu::NumberingSystem* numbering_system =
- icu::NumberingSystem::createInstance(icu_locale, status);
- if (U_SUCCESS(status)) {
- const char* ns = numbering_system->getName();
- JSObject::SetProperty(
- resolved,
- isolate->factory()->NewStringFromAscii(CStrVector("numberingSystem")),
- isolate->factory()->NewStringFromAscii(CStrVector(ns)),
- NONE,
- kNonStrictMode);
- } else {
- JSObject::SetProperty(
- resolved,
- isolate->factory()->NewStringFromAscii(CStrVector("numberingSystem")),
- isolate->factory()->undefined_value(),
- NONE,
- kNonStrictMode);
- }
- delete numbering_system;
-
- // Set the locale
- char result[ULOC_FULLNAME_CAPACITY];
- status = U_ZERO_ERROR;
- uloc_toLanguageTag(
- icu_locale.getName(), result, ULOC_FULLNAME_CAPACITY, FALSE, &status);
- if (U_SUCCESS(status)) {
- JSObject::SetProperty(
- resolved,
- isolate->factory()->NewStringFromAscii(CStrVector("locale")),
- isolate->factory()->NewStringFromAscii(CStrVector(result)),
- NONE,
- kNonStrictMode);
- } else {
- // This would never happen, since we got the locale from ICU.
- JSObject::SetProperty(
- resolved,
- isolate->factory()->NewStringFromAscii(CStrVector("locale")),
- isolate->factory()->NewStringFromAscii(CStrVector("und")),
- NONE,
- kNonStrictMode);
- }
-}
-
-
-template<int internal_fields, EternalHandles::SingletonHandle field>
-Handle<ObjectTemplateInfo> GetEternal(Isolate* isolate) {
- if (isolate->eternal_handles()->Exists(field)) {
- return Handle<ObjectTemplateInfo>::cast(
- isolate->eternal_handles()->GetSingleton(field));
- }
- v8::Local<v8::ObjectTemplate> raw_template(v8::ObjectTemplate::New());
- raw_template->SetInternalFieldCount(internal_fields);
- return Handle<ObjectTemplateInfo>::cast(
- isolate->eternal_handles()->CreateSingleton(
- isolate,
- *v8::Utils::OpenHandle(*raw_template),
- field));
-}
-
-} // namespace
-
-
-// static
-Handle<ObjectTemplateInfo> I18N::GetTemplate(Isolate* isolate) {
- return GetEternal<1, i::EternalHandles::I18N_TEMPLATE_ONE>(isolate);
-}
-
-
-// static
-Handle<ObjectTemplateInfo> I18N::GetTemplate2(Isolate* isolate) {
- return GetEternal<2, i::EternalHandles::I18N_TEMPLATE_TWO>(isolate);
-}
-
-
-// static
-icu::SimpleDateFormat* DateFormat::InitializeDateTimeFormat(
- Isolate* isolate,
- Handle<String> locale,
- Handle<JSObject> options,
- Handle<JSObject> resolved) {
- // Convert BCP47 into ICU locale format.
- UErrorCode status = U_ZERO_ERROR;
- icu::Locale icu_locale;
- char icu_result[ULOC_FULLNAME_CAPACITY];
- int icu_length = 0;
- v8::String::Utf8Value bcp47_locale(v8::Utils::ToLocal(locale));
- if (bcp47_locale.length() != 0) {
- uloc_forLanguageTag(*bcp47_locale, icu_result, ULOC_FULLNAME_CAPACITY,
- &icu_length, &status);
- if (U_FAILURE(status) || icu_length == 0) {
- return NULL;
- }
- icu_locale = icu::Locale(icu_result);
- }
-
- icu::SimpleDateFormat* date_format = CreateICUDateFormat(
- isolate, icu_locale, options);
- if (!date_format) {
- // Remove extensions and try again.
- icu::Locale no_extension_locale(icu_locale.getBaseName());
- date_format = CreateICUDateFormat(isolate, no_extension_locale, options);
-
- // Set resolved settings (pattern, numbering system, calendar).
- SetResolvedSettings(isolate, no_extension_locale, date_format, resolved);
- } else {
- SetResolvedSettings(isolate, icu_locale, date_format, resolved);
- }
-
- return date_format;
-}
-
-
-icu::SimpleDateFormat* DateFormat::UnpackDateFormat(
- Isolate* isolate,
- Handle<JSObject> obj) {
- if (obj->HasLocalProperty(
- *isolate->factory()->NewStringFromAscii(CStrVector("dateFormat")))) {
- return reinterpret_cast<icu::SimpleDateFormat*>(
- obj->GetInternalField(0));
- }
-
- return NULL;
-}
-
-
-void DateFormat::DeleteDateFormat(v8::Isolate* isolate,
- Persistent<v8::Object>* object,
- void* param) {
- // First delete the hidden C++ object.
- delete reinterpret_cast<icu::SimpleDateFormat*>(Handle<JSObject>::cast(
- v8::Utils::OpenPersistent(object))->GetInternalField(0));
-
- // Then dispose of the persistent handle to JS object.
- object->Dispose(isolate);
-}
-
-} } // namespace v8::internal
diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc
index 7bea373..e0ae006 100644
--- a/src/ia32/assembler-ia32.cc
+++ b/src/ia32/assembler-ia32.cc
@@ -1227,10 +1227,6 @@
void Assembler::test(const Operand& op, const Immediate& imm) {
- if (op.is_reg_only()) {
- test(op.reg(), imm);
- return;
- }
EnsureSpace ensure_space(this);
EMIT(0xF7);
emit_operand(eax, op);
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index 59124ea..b90a17f 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -241,7 +241,7 @@
if (FLAG_debug_code) {
__ cmp(esi, edi);
__ Assert(less_equal,
- kUnexpectedNumberOfPreAllocatedPropertyFields);
+ "Unexpected number of pre-allocated property fields.");
}
__ InitializeFieldsWithFiller(ecx, esi, edx);
__ mov(edx, factory->one_pointer_filler_map());
@@ -272,7 +272,7 @@
__ sub(edx, ecx);
// Done if no extra properties are to be allocated.
__ j(zero, &allocated);
- __ Assert(positive, kPropertyAllocationCountFailed);
+ __ Assert(positive, "Property allocation count failed.");
// Scale the number of elements by pointer size and add the header for
// FixedArrays to the start of the next object calculation from above.
@@ -654,7 +654,7 @@
__ ret(2 * kPointerSize); // Remove state, eax.
__ bind(&not_tos_eax);
- __ Abort(kNoCasesLeft);
+ __ Abort("no cases left");
}
@@ -1033,9 +1033,9 @@
__ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ebx, Immediate(kSmiTagMask));
- __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
+ __ Assert(not_zero, "Unexpected initial map for InternalArray function");
__ CmpObjectType(ebx, MAP_TYPE, ecx);
- __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
+ __ Assert(equal, "Unexpected initial map for InternalArray function");
}
// Run the native code for the InternalArray function called as a normal
@@ -1062,9 +1062,9 @@
__ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ebx, Immediate(kSmiTagMask));
- __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(not_zero, "Unexpected initial map for Array function");
__ CmpObjectType(ebx, MAP_TYPE, ecx);
- __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(equal, "Unexpected initial map for Array function");
}
// Run the native code for the Array function called as a normal function.
@@ -1092,7 +1092,7 @@
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
- __ Assert(equal, kUnexpectedStringFunction);
+ __ Assert(equal, "Unexpected String function");
}
// Load the first argument into eax and get rid of the rest
@@ -1137,9 +1137,9 @@
if (FLAG_debug_code) {
__ cmpb(FieldOperand(ecx, Map::kInstanceSizeOffset),
JSValue::kSize >> kPointerSizeLog2);
- __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
+ __ Assert(equal, "Unexpected string wrapper instance size");
__ cmpb(FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset), 0);
- __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
+ __ Assert(equal, "Unexpected unused properties of string wrapper");
}
__ mov(FieldOperand(eax, HeapObject::kMapOffset), ecx);
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 8721656..5789f49 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -250,6 +250,17 @@
}
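+// The unary op stub receives its single operand in eax; UnaryOpIC_Miss is
+// registered as its deoptimization handler.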
+void UnaryOpStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { eax };
+ descriptor->register_param_count_ = 1;
+ descriptor->register_params_ = registers;
+ descriptor->deoptimization_handler_ =
+ FUNCTION_ADDR(UnaryOpIC_Miss);
+}
+
+
void StoreGlobalStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -500,8 +511,9 @@
Label after_sentinel;
__ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
if (FLAG_debug_code) {
+ const char* message = "Expected 0 as a Smi sentinel";
__ cmp(ecx, 0);
- __ Assert(equal, kExpected0AsASmiSentinel);
+ __ Assert(equal, message);
}
__ mov(ecx, GlobalObjectOperand());
__ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
@@ -3457,9 +3469,9 @@
__ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
if (FLAG_debug_code) {
__ test(ecx, Immediate(kSmiTagMask));
- __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
+ __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
__ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
- __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
+ __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
}
// ecx: RegExp data (FixedArray)
@@ -3819,7 +3831,7 @@
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ test_b(ebx, kIsIndirectStringMask);
- __ Assert(zero, kExternalStringExpectedButNotFound);
+ __ Assert(zero, "external string expected, but not found");
}
__ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
// Move the pointer so that offset-wise, it looks like a sequential string.
@@ -4314,7 +4326,7 @@
edi);
}
#ifdef DEBUG
- __ Abort(kUnexpectedFallThroughFromStringComparison);
+ __ Abort("Unexpected fall-through from string comparison");
#endif
__ bind(&check_unequal_objects);
@@ -5073,9 +5085,9 @@
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
+ __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
__ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
+ __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
}
__ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
__ mov(Operand(scratch, 0), map);
@@ -5108,7 +5120,7 @@
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
+ __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
@@ -5130,7 +5142,7 @@
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
+ __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
@@ -5243,7 +5255,7 @@
void StringCharCodeAtGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
+ __ Abort("Unexpected fallthrough to CharCodeAt slow case");
// Index is not a smi.
__ bind(&index_not_smi_);
@@ -5293,7 +5305,7 @@
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
+ __ Abort("Unexpected fallthrough from CharCodeAt slow case");
}
@@ -5328,7 +5340,7 @@
void StringCharFromCodeGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
+ __ Abort("Unexpected fallthrough to CharFromCode slow case");
__ bind(&slow_case_);
call_helper.BeforeCall(masm);
@@ -5340,7 +5352,7 @@
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
+ __ Abort("Unexpected fallthrough from CharFromCode slow case");
}
@@ -7470,7 +7482,7 @@
}
// If we reached this point there is a problem.
- __ Abort(kUnexpectedElementsKindInArrayConstructor);
+ __ Abort("Unexpected ElementsKind in array constructor");
}
@@ -7533,7 +7545,7 @@
}
// If we reached this point there is a problem.
- __ Abort(kUnexpectedElementsKindInArrayConstructor);
+ __ Abort("Unexpected ElementsKind in array constructor");
}
@@ -7598,9 +7610,9 @@
__ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ecx, Immediate(kSmiTagMask));
- __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(not_zero, "Unexpected initial map for Array function");
__ CmpObjectType(ecx, MAP_TYPE, ecx);
- __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(equal, "Unexpected initial map for Array function");
// We should either have undefined in ebx or a valid cell
Label okay_here;
@@ -7608,7 +7620,7 @@
__ cmp(ebx, Immediate(undefined_sentinel));
__ j(equal, &okay_here);
__ cmp(FieldOperand(ebx, 0), Immediate(cell_map));
- __ Assert(equal, kExpectedPropertyCellInRegisterEbx);
+ __ Assert(equal, "Expected property cell in register ebx");
__ bind(&okay_here);
}
@@ -7712,9 +7724,9 @@
__ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ecx, Immediate(kSmiTagMask));
- __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(not_zero, "Unexpected initial map for Array function");
__ CmpObjectType(ecx, MAP_TYPE, ecx);
- __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
+ __ Assert(equal, "Unexpected initial map for Array function");
}
// Figure out the right elements kind
@@ -7733,7 +7745,7 @@
__ j(equal, &done);
__ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
__ Assert(equal,
- kInvalidElementsKindForInternalArrayOrInternalPackedArray);
+ "Invalid ElementsKind for InternalArray or InternalPackedArray");
__ bind(&done);
}
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index 28b0f4a..f488718 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -779,7 +779,7 @@
if (FLAG_debug_code) {
__ cmp(ebx, masm->isolate()->factory()->the_hole_value());
- __ Assert(equal, kObjectFoundInSmiOnlyArray);
+ __ Assert(equal, "object found in smi-only array");
}
if (CpuFeatures::IsSupported(SSE2)) {
@@ -1011,7 +1011,7 @@
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ test(result, Immediate(kIsIndirectStringMask));
- __ Assert(zero, kExternalStringExpectedButNotFound);
+ __ Assert(zero, "external string expected, but not found");
}
// Rule out short external strings.
STATIC_CHECK(kShortExternalStringTag != 0);
diff --git a/src/ia32/debug-ia32.cc b/src/ia32/debug-ia32.cc
index fd703dc..68199f9 100644
--- a/src/ia32/debug-ia32.cc
+++ b/src/ia32/debug-ia32.cc
@@ -128,7 +128,7 @@
if ((non_object_regs & (1 << r)) != 0) {
if (FLAG_debug_code) {
__ test(reg, Immediate(0xc0000000));
- __ Assert(zero, kUnableToEncodeValueAsSmi);
+ __ Assert(zero, "Unable to encode value as smi");
}
__ SmiTag(reg);
__ push(reg);
diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc
index a9bd8c5..4896806 100644
--- a/src/ia32/deoptimizer-ia32.cc
+++ b/src/ia32/deoptimizer-ia32.cc
@@ -625,7 +625,7 @@
__ pop(ecx);
if (FLAG_debug_code) {
__ cmp(ecx, Immediate(kAlignmentZapValue));
- __ Assert(equal, kAlignmentMarkerExpected);
+ __ Assert(equal, "alignment marker expected");
}
__ bind(&no_padding);
} else {
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index f08a269..8f11acc 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -745,9 +745,9 @@
// Check that we're not inside a with or catch context.
__ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
__ cmp(ebx, isolate()->factory()->with_context_map());
- __ Check(not_equal, kDeclarationInWithContext);
+ __ Check(not_equal, "Declaration in with context.");
__ cmp(ebx, isolate()->factory()->catch_context_map());
- __ Check(not_equal, kDeclarationInCatchContext);
+ __ Check(not_equal, "Declaration in catch context.");
}
}
@@ -2169,7 +2169,7 @@
__ Push(Smi::FromInt(resume_mode));
__ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
// Not reached: the runtime call returns elsewhere.
- __ Abort(kGeneratorFailedToResume);
+ __ Abort("Generator failed to resume.");
// Throw error if we attempt to operate on a running generator.
__ bind(&wrong_state);
@@ -2468,7 +2468,7 @@
// Check for an uninitialized let binding.
__ mov(edx, location);
__ cmp(edx, isolate()->factory()->the_hole_value());
- __ Check(equal, kLetBindingReInitialization);
+ __ Check(equal, "Let binding re-initialization.");
}
// Perform the assignment.
__ mov(location, eax);
@@ -3430,15 +3430,15 @@
Register value,
uint32_t encoding_mask) {
__ test(index, Immediate(kSmiTagMask));
- __ Check(zero, kNonSmiIndex);
+ __ Check(zero, "Non-smi index");
__ test(value, Immediate(kSmiTagMask));
- __ Check(zero, kNonSmiValue);
+ __ Check(zero, "Non-smi value");
__ cmp(index, FieldOperand(string, String::kLengthOffset));
- __ Check(less, kIndexIsTooLarge);
+ __ Check(less, "Index is too large");
__ cmp(index, Immediate(Smi::FromInt(0)));
- __ Check(greater_equal, kIndexIsNegative);
+ __ Check(greater_equal, "Index is negative");
__ push(value);
__ mov(value, FieldOperand(string, HeapObject::kMapOffset));
@@ -3446,7 +3446,7 @@
__ and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
__ cmp(value, Immediate(encoding_mask));
- __ Check(equal, kUnexpectedStringType);
+ __ Check(equal, "Unexpected string type");
__ pop(value);
}
@@ -3818,7 +3818,7 @@
Handle<FixedArray> jsfunction_result_caches(
isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
- __ Abort(kAttemptToUseUndefinedCache);
+ __ Abort("Attempt to use undefined cache.");
__ mov(eax, isolate()->factory()->undefined_value());
context()->Plug(eax);
return;
@@ -4000,7 +4000,7 @@
// scratch, string_length, elements.
if (generate_debug_code_) {
__ cmp(index, array_length);
- __ Assert(less, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
+ __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
}
__ bind(&loop);
__ mov(string, FieldOperand(elements,
@@ -4347,12 +4347,34 @@
break;
}
+ case Token::SUB:
+ EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
+ break;
+
+ case Token::BIT_NOT:
+ EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
+ break;
+
default:
UNREACHABLE();
}
}
+void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
+ const char* comment) {
+ Comment cmt(masm_, comment);
+ UnaryOpStub stub(expr->op());
+ // UnaryOpStub expects the argument to be in the
+ // accumulator register eax.
+ VisitForAccumulatorValue(expr->expression());
+ SetSourcePosition(expr->position());
+ CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
+ expr->UnaryOperationFeedbackId());
+ context()->Plug(eax);
+}
+
+
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index 1e0f14e..bf0c80b 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -483,7 +483,7 @@
// based on 32 bits of the map pointer and the string hash.
if (FLAG_debug_code) {
__ cmp(eax, FieldOperand(edx, HeapObject::kMapOffset));
- __ Check(equal, kMapIsNoLongerInEax);
+ __ Check(equal, "Map is no longer in eax.");
}
__ mov(ebx, eax); // Keep the map around for later.
__ shr(eax, KeyedLookupCache::kMapHashShift);
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 061ec9b..7a601cf 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -113,7 +113,7 @@
}
-void LCodeGen::Abort(BailoutReason reason) {
+void LCodeGen::Abort(const char* reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -220,7 +220,7 @@
dynamic_frame_alignment_ &&
FLAG_debug_code) {
__ test(esp, Immediate(kPointerSize));
- __ Assert(zero, kFrameIsExpectedToBeAligned);
+ __ Assert(zero, "frame is expected to be aligned");
}
// Reserve space for the stack slots needed by the code.
@@ -882,7 +882,7 @@
} else if (context->IsConstantOperand()) {
HConstant* constant =
chunk_->LookupConstant(LConstantOperand::cast(context));
- __ LoadObject(esi, Handle<Object>::cast(constant->handle()));
+ __ LoadHeapObject(esi, Handle<Context>::cast(constant->handle()));
} else {
UNREACHABLE();
}
@@ -948,7 +948,7 @@
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
- Abort(kBailoutWasNotPrepared);
+ Abort("bailout was not prepared");
return;
}
@@ -1679,9 +1679,8 @@
ASSERT(left->IsRegister());
if (right->IsConstantOperand()) {
- int32_t right_operand =
- ToRepresentation(LConstantOperand::cast(right),
- instr->hydrogen()->representation());
+ int right_operand = ToRepresentation(LConstantOperand::cast(right),
+ instr->hydrogen()->representation());
switch (instr->op()) {
case Token::BIT_AND:
__ and_(ToRegister(left), right_operand);
@@ -1690,11 +1689,7 @@
__ or_(ToRegister(left), right_operand);
break;
case Token::BIT_XOR:
- if (right_operand == int32_t(~0)) {
- __ not_(ToRegister(left));
- } else {
- __ xor_(ToRegister(left), right_operand);
- }
+ __ xor_(ToRegister(left), right_operand);
break;
default:
UNREACHABLE();
@@ -1981,7 +1976,7 @@
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ cmp(value, Immediate(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
- __ Check(equal, kUnexpectedStringType);
+ __ Check(equal, "Unexpected string type");
__ pop(value);
}
@@ -1995,6 +1990,13 @@
}
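+// BIT_NOT compiles down to a single in-place not; the register allocator
+// guarantees that input and result share a register.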
+void LCodeGen::DoBitNotI(LBitNotI* instr) {
+ LOperand* input = instr->value();
+ ASSERT(input->Equals(instr->result()));
+ __ not_(ToRegister(input));
+}
+
+
void LCodeGen::DoThrow(LThrow* instr) {
__ push(ToOperand(instr->value()));
ASSERT(ToRegister(instr->context()).is(esi));
@@ -2861,7 +2863,7 @@
__ cmp(Operand(esp,
(parameter_count + extra_value_count) * kPointerSize),
Immediate(kAlignmentZapValue));
- __ Assert(equal, kExpectedAlignmentMarker);
+ __ Assert(equal, "expected alignment marker");
}
__ Ret((parameter_count + extra_value_count) * kPointerSize, ecx);
} else {
@@ -2874,7 +2876,7 @@
__ cmp(Operand(esp, reg, times_pointer_size,
extra_value_count * kPointerSize),
Immediate(kAlignmentZapValue));
- __ Assert(equal, kExpectedAlignmentMarker);
+ __ Assert(equal, "expected alignment marker");
}
// emit code to restore stack based on instr->parameter_count()
@@ -3445,7 +3447,7 @@
if (key->IsConstantOperand()) {
int constant_value = ToInteger32(LConstantOperand::cast(key));
if (constant_value & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big");
}
return Operand(elements_pointer_reg,
((constant_value + additional_index) << shift_size)
@@ -3819,7 +3821,7 @@
__ xorps(scratch, scratch);
__ subsd(scratch, input_reg);
__ pand(input_reg, scratch);
- } else if (r.IsSmiOrInteger32()) {
+ } else if (r.IsInteger32()) {
EmitIntegerMathAbs(instr);
} else { // Tagged case.
DeferredMathAbsTaggedHeapNumber* deferred =
@@ -5791,68 +5793,31 @@
}
-void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
- {
- PushSafepointRegistersScope scope(this);
- __ push(object);
- __ xor_(esi, esi);
- __ CallRuntimeSaveDoubles(Runtime::kMigrateInstance);
- RecordSafepointWithRegisters(
- instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
-
- __ test(eax, Immediate(kSmiTagMask));
- }
- DeoptimizeIf(zero, instr->environment());
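+// Compares the object's map against the expected map and deoptimizes on a
+// mismatch.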
+void LCodeGen::DoCheckMapCommon(Register reg,
+ Handle<Map> map,
+ LInstruction* instr) {
+ Label success;
+ __ CompareMap(reg, map, &success);
+ DeoptimizeIf(not_equal, instr->environment());
+ __ bind(&success);
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
- class DeferredCheckMaps: public LDeferredCode {
- public:
- DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
- : LDeferredCode(codegen), instr_(instr), object_(object) {
- SetExit(check_maps());
- }
- virtual void Generate() {
- codegen()->DoDeferredInstanceMigration(instr_, object_);
- }
- Label* check_maps() { return &check_maps_; }
- virtual LInstruction* instr() { return instr_; }
- private:
- LCheckMaps* instr_;
- Label check_maps_;
- Register object_;
- };
-
if (instr->hydrogen()->CanOmitMapChecks()) return;
-
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- SmallMapList* map_set = instr->hydrogen()->map_set();
-
- DeferredCheckMaps* deferred = NULL;
- if (instr->hydrogen()->has_migration_target()) {
- deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
- __ bind(deferred->check_maps());
- }
-
Label success;
+ SmallMapList* map_set = instr->hydrogen()->map_set();
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMap(reg, map, &success);
__ j(equal, &success);
}
-
Handle<Map> map = map_set->last();
- __ CompareMap(reg, map, &success);
- if (instr->hydrogen()->has_migration_target()) {
- __ j(not_equal, deferred->entry());
- } else {
- DeoptimizeIf(not_equal, instr->environment());
- }
-
+ DoCheckMapCommon(reg, map, instr);
__ bind(&success);
}
@@ -6029,6 +5994,22 @@
}
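+// Loads each prototype on the chain and re-checks its map against the one
+// recorded at compile time, deoptimizing if any of them has changed.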
+void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
+ if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
+ Register reg = ToRegister(instr->temp());
+
+ ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
+ ZoneList<Handle<Map> >* maps = instr->maps();
+
+ ASSERT(prototypes->length() == maps->length());
+
+ for (int i = 0; i < prototypes->length(); i++) {
+ __ LoadHeapObject(reg, prototypes->at(i));
+ DoCheckMapCommon(reg, maps->at(i), instr);
+ }
+}
+
+
void LCodeGen::DoAllocate(LAllocate* instr) {
class DeferredAllocate: public LDeferredCode {
public:
diff --git a/src/ia32/lithium-codegen-ia32.h b/src/ia32/lithium-codegen-ia32.h
index c9a7899..0beef85 100644
--- a/src/ia32/lithium-codegen-ia32.h
+++ b/src/ia32/lithium-codegen-ia32.h
@@ -163,7 +163,8 @@
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
- void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
+
+ void DoCheckMapCommon(Register reg, Handle<Map> map, LInstruction* instr);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -211,7 +212,7 @@
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- void Abort(BailoutReason reason);
+ void Abort(const char* reason);
void FPRINTF_CHECKING Comment(const char* format, ...);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
@@ -294,7 +295,7 @@
Register ToRegister(int index) const;
XMMRegister ToDoubleRegister(int index) const;
X87Register ToX87Register(int index) const;
- int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
+ int ToRepresentation(LConstantOperand* op, const Representation& r) const;
int32_t ToInteger32(LConstantOperand* op) const;
ExternalReference ToExternalReference(LConstantOperand* op) const;
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index 52f39d4..2fa038b 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -487,7 +487,7 @@
}
-void LChunkBuilder::Abort(BailoutReason reason) {
+void LChunkBuilder::Abort(const char* reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -698,7 +698,7 @@
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
- Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
+ Abort("Out of virtual registers while trying to allocate temp register.");
vreg = 0;
}
operand->set_virtual_register(vreg);
@@ -1414,6 +1414,16 @@
}
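+// A bit-not whose result is never used is dead code; otherwise the result
+// is defined to reuse the input register.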
+LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
+ ASSERT(instr->value()->representation().IsInteger32());
+ ASSERT(instr->representation().IsInteger32());
+ if (instr->HasNoUses()) return NULL;
+ LOperand* input = UseRegisterAtStart(instr->value());
+ LBitNotI* result = new(zone()) LBitNotI(input);
+ return DefineSameAsFirst(result);
+}
+
+
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::DIV, instr);
@@ -1841,6 +1851,17 @@
}
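+// Numeric constraints and induction variable annotations are hydrogen-only
+// bookkeeping; no lithium code is generated for them.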
+LInstruction* LChunkBuilder::DoNumericConstraint(HNumericConstraint* instr) {
+ return NULL;
+}
+
+
+LInstruction* LChunkBuilder::DoInductionVariableAnnotation(
+ HInductionVariableAnnotation* instr) {
+ return NULL;
+}
+
+
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
return AssignEnvironment(new(zone()) LBoundsCheck(
UseRegisterOrConstantAtStart(instr->index()),
@@ -2028,6 +2049,15 @@
}
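+// A temp register and a deoptimization environment are only needed when
+// the prototype checks cannot be omitted.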
+LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
+ LUnallocated* temp = NULL;
+ if (!instr->CanOmitPrototypeChecks()) temp = TempRegister();
+ LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp);
+ if (instr->CanOmitPrototypeChecks()) return result;
+ return AssignEnvironment(result);
+}
+
+
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
// If the target is in new space, we'll emit a global cell compare and so
// want the value in a register. If the target gets promoted before we
@@ -2041,16 +2071,10 @@
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) {
- value = UseRegisterAtStart(instr->value());
- if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
- }
+ if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
LCheckMaps* result = new(zone()) LCheckMaps(value);
- if (!instr->CanOmitMapChecks()) {
- AssignEnvironment(result);
- if (instr->has_migration_target()) return AssignPointerMap(result);
- }
- return result;
+ if (instr->CanOmitMapChecks()) return result;
+ return AssignEnvironment(result);
}
@@ -2394,7 +2418,7 @@
bool is_external_location = instr->access().IsExternalMemory() &&
instr->access().offset() == 0;
bool needs_write_barrier = instr->NeedsWriteBarrier();
- bool needs_write_barrier_for_map = instr->has_transition() &&
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
instr->NeedsWriteBarrierForMap();
LOperand* obj;
@@ -2543,7 +2567,7 @@
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
- Abort(kTooManySpillSlotsNeededForOSR);
+ Abort("Too many spill slots needed for OSR");
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h
index effecb7..6b0f9d0 100644
--- a/src/ia32/lithium-ia32.h
+++ b/src/ia32/lithium-ia32.h
@@ -50,6 +50,7 @@
V(ArithmeticD) \
V(ArithmeticT) \
V(BitI) \
+ V(BitNotI) \
V(BoundsCheck) \
V(Branch) \
V(CallConstantFunction) \
@@ -67,6 +68,7 @@
V(CheckMaps) \
V(CheckMapValue) \
V(CheckNonSmi) \
+ V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampDToUint8) \
V(ClampIToUint8) \
@@ -1356,6 +1358,18 @@
};
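+// Lithium instruction for Token::BIT_NOT: one value input, no temps; the
+// chunk builder defines its result same-as-first.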
+class LBitNotI: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LBitNotI(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
+};
+
+
class LAddI: public LTemplateInstruction<1, 2, 0> {
public:
LAddI(LOperand* left, LOperand* right) {
@@ -2222,7 +2236,7 @@
virtual void PrintDataTo(StringStream* stream);
- Handle<Map> transition() const { return hydrogen()->transition_map(); }
+ Handle<Map> transition() const { return hydrogen()->transition(); }
Representation representation() const {
return hydrogen()->field_representation();
}
@@ -2437,6 +2451,24 @@
};
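+// Carries the prototype/map pairs collected by the hydrogen instruction;
+// the temp register is used to load each prototype for its map check.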
+class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 1> {
+ public:
+ explicit LCheckPrototypeMaps(LOperand* temp) {
+ temps_[0] = temp;
+ }
+
+ LOperand* temp() { return temps_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
+
+ ZoneList<Handle<JSObject> >* prototypes() const {
+ return hydrogen()->prototypes();
+ }
+ ZoneList<Handle<Map> >* maps() const { return hydrogen()->maps(); }
+};
+
+
class LCheckSmi: public LTemplateInstruction<1, 1, 0> {
public:
explicit LCheckSmi(LOperand* value) {
@@ -2768,7 +2800,7 @@
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(BailoutReason reason);
+ void Abort(const char* reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 8b1be3c..2ab5a25 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -678,7 +678,7 @@
JumpIfSmi(object, &ok);
cmp(FieldOperand(object, HeapObject::kMapOffset),
isolate()->factory()->heap_number_map());
- Check(equal, kOperandNotANumber);
+ Check(equal, "Operand not a number");
bind(&ok);
}
}
@@ -687,7 +687,7 @@
void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(equal, kOperandIsNotASmi);
+ Check(equal, "Operand is not a smi");
}
}
@@ -695,12 +695,12 @@
void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(not_equal, kOperandIsASmiAndNotAString);
+ Check(not_equal, "Operand is a smi and not a string");
push(object);
mov(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, FIRST_NONSTRING_TYPE);
pop(object);
- Check(below, kOperandIsNotAString);
+ Check(below, "Operand is not a string");
}
}
@@ -708,12 +708,12 @@
void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(not_equal, kOperandIsASmiAndNotAName);
+ Check(not_equal, "Operand is a smi and not a name");
push(object);
mov(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, LAST_NAME_TYPE);
pop(object);
- Check(below_equal, kOperandIsNotAName);
+ Check(below_equal, "Operand is not a name");
}
}
@@ -721,7 +721,7 @@
void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(not_equal, kOperandIsASmi);
+ Check(not_equal, "Operand is a smi");
}
}
@@ -734,7 +734,7 @@
push(Immediate(CodeObject()));
if (emit_debug_code()) {
cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
- Check(not_equal, kCodeObjectNotProperlyPatched);
+ Check(not_equal, "code object not properly patched");
}
}
@@ -743,7 +743,7 @@
if (emit_debug_code()) {
cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
Immediate(Smi::FromInt(type)));
- Check(equal, kStackFrameTypesMustMatch);
+ Check(equal, "stack frame types must match");
}
leave();
}
@@ -1024,7 +1024,7 @@
// When generating debug code, make sure the lexical context is set.
if (emit_debug_code()) {
cmp(scratch1, Immediate(0));
- Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
+ Check(not_equal, "we should not have an empty lexical context");
}
// Load the native context of the current context.
int offset =
@@ -1037,7 +1037,7 @@
// Read the first word and compare to native_context_map.
cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
- Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
}
// Check if both contexts are the same.
@@ -1056,12 +1056,12 @@
// Check the context is a native context.
if (emit_debug_code()) {
cmp(scratch2, isolate()->factory()->null_value());
- Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);
+ Check(not_equal, "JSGlobalProxy::context() should not be null.");
// Read the first word and compare to native_context_map(),
cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
- Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
}
int token_offset = Context::kHeaderSize +
@@ -1206,7 +1206,7 @@
#ifdef DEBUG
// Assert that result actually contains top on entry.
cmp(result, Operand::StaticVariable(allocation_top));
- Check(equal, kUnexpectedAllocationTop);
+ Check(equal, "Unexpected allocation top");
#endif
return;
}
@@ -1226,7 +1226,7 @@
AllocationFlags flags) {
if (emit_debug_code()) {
test(result_end, Immediate(kObjectAlignmentMask));
- Check(zero, kUnalignedAllocationInNewSpace);
+ Check(zero, "Unaligned allocation in new space");
}
ExternalReference allocation_top =
@@ -1458,7 +1458,7 @@
and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
cmp(object, Operand::StaticVariable(new_space_allocation_top));
- Check(below, kUndoAllocationOfNonAllocatedMemory);
+ Check(below, "Undo allocation of non allocated memory");
#endif
mov(Operand::StaticVariable(new_space_allocation_top), object);
}
@@ -2062,7 +2062,7 @@
// previous handle scope.
mov(Operand::StaticVariable(next_address), ebx);
sub(Operand::StaticVariable(level_address), Immediate(1));
- Assert(above_equal, kInvalidHandleScopeLevel);
+ Assert(above_equal, "Invalid HandleScope level");
cmp(edi, Operand::StaticVariable(limit_address));
j(not_equal, &delete_allocated_handles);
bind(&leave_exit_frame);
@@ -2104,7 +2104,7 @@
cmp(return_value, isolate()->factory()->null_value());
j(equal, &ok, Label::kNear);
- Abort(kAPICallReturnedInvalidObject);
+ Abort("API call returned invalid object");
bind(&ok);
#endif
@@ -2390,7 +2390,7 @@
if (emit_debug_code()) {
cmp(FieldOperand(dst, HeapObject::kMapOffset),
isolate()->factory()->with_context_map());
- Check(not_equal, kVariableResolvedToWithContext);
+ Check(not_equal, "Variable resolved to with context.");
}
}
@@ -2477,7 +2477,7 @@
CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
jmp(&ok);
bind(&fail);
- Abort(kGlobalFunctionsMustHaveInitialMap);
+ Abort("Global functions must have initial map");
bind(&ok);
}
}
@@ -2578,7 +2578,7 @@
and_(eax, kTopMask);
shr(eax, 11);
cmp(eax, Immediate(tos));
- Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
+ Check(equal, "Unexpected FPU stack depth after instruction");
fnclex();
pop(eax);
}
@@ -2661,8 +2661,8 @@
}
-void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
- if (emit_debug_code()) Check(cc, reason);
+void MacroAssembler::Assert(Condition cc, const char* msg) {
+ if (emit_debug_code()) Check(cc, msg);
}
@@ -2679,16 +2679,16 @@
cmp(FieldOperand(elements, HeapObject::kMapOffset),
Immediate(factory->fixed_cow_array_map()));
j(equal, &ok);
- Abort(kJSObjectWithFastElementsMapHasSlowElements);
+ Abort("JSObject with fast elements map has slow elements");
bind(&ok);
}
}
-void MacroAssembler::Check(Condition cc, BailoutReason reason) {
+void MacroAssembler::Check(Condition cc, const char* msg) {
Label L;
j(cc, &L);
- Abort(reason);
+ Abort(msg);
// will not return here
bind(&L);
}
@@ -2709,13 +2709,12 @@
}
-void MacroAssembler::Abort(BailoutReason reason) {
+void MacroAssembler::Abort(const char* msg) {
// We want to pass the msg string like a smi to avoid GC
// problems, however msg is not guaranteed to be aligned
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
- const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
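A worked instance of the alignment trick described in the comment above, assuming
the usual 32-bit smi constants (kSmiTag == 0, kSmiTagMask == 1); the reconstruction
step is inferred from the comment, since this hunk ends before it:

    // msg (char*)      p1 = 0x08052a53                   // may be odd: not smi-shaped
    // aligned pointer  p0 = (p1 & ~1) + 0 = 0x08052a52   // low bit clear: valid smi
    // alignment delta  p1 - p0 = 1                       // small, passed as a real smi
    // runtime side:    msg = reinterpret_cast<const char*>(p0 + delta);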
@@ -3119,7 +3118,7 @@
if (emit_debug_code()) {
mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
- Check(less_equal, kLiveBytesCountOverflowChunkSize);
+ Check(less_equal, "Live Bytes Count overflow chunk size");
}
bind(&done);
diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h
index 165c9ce..3bca930 100644
--- a/src/ia32/macro-assembler-ia32.h
+++ b/src/ia32/macro-assembler-ia32.h
@@ -807,8 +807,6 @@
void Drop(int element_count);
void Call(Label* target) { call(target); }
- void Push(Register src) { push(src); }
- void Pop(Register dst) { pop(dst); }
// Emit call to the code we are currently generating.
void CallSelf() {
@@ -846,15 +844,15 @@
// Calls Abort(msg) if the condition cc is not satisfied.
// Use --debug_code to enable.
- void Assert(Condition cc, BailoutReason reason);
+ void Assert(Condition cc, const char* msg);
void AssertFastElements(Register elements);
// Like Assert(), but always enabled.
- void Check(Condition cc, BailoutReason reason);
+ void Check(Condition cc, const char* msg);
// Print a message to stdout and abort execution.
- void Abort(BailoutReason reason);
+ void Abort(const char* msg);
// Check that the stack is aligned.
void CheckStackAlignment();
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index b7828b8..123506f 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -3153,7 +3153,7 @@
__ j(equal, &miss);
} else if (FLAG_debug_code) {
__ cmp(eax, factory()->the_hole_value());
- __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
+ __ Check(not_equal, "DontDelete cells can't contain the hole");
}
HandlerFrontendFooter(name, &success, &miss);
diff --git a/src/ic.cc b/src/ic.cc
index 3c22580..a55160a 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -390,6 +390,7 @@
case Code::KEYED_CALL_IC: return KeyedCallIC::Clear(address, target);
case Code::COMPARE_IC: return CompareIC::Clear(address, target);
case Code::COMPARE_NIL_IC: return CompareNilIC::Clear(address, target);
+ case Code::UNARY_OP_IC:
case Code::BINARY_OP_IC:
case Code::TO_BOOLEAN_IC:
// Clearing these is tricky and does not
@@ -2588,6 +2589,27 @@
}
+MaybeObject* UnaryOpIC::Transition(Handle<Object> object) {
+ Code::ExtraICState extra_ic_state = target()->extended_extra_ic_state();
+ UnaryOpStub stub(extra_ic_state);
+
+ stub.UpdateStatus(object);
+
+ Handle<Code> code = stub.GetCode(isolate());
+ set_target(*code);
+
+ return stub.Result(object, isolate());
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, UnaryOpIC_Miss) {
+ HandleScope scope(isolate);
+ Handle<Object> object = args.at<Object>(0);
+ UnaryOpIC ic(isolate);
+ return ic.Transition(object);
+}
+
+
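The miss handler above follows the standard IC transition cycle; a condensed sketch
of Transition with the steps annotated (same names as the diff, flow paraphrased):

    MaybeObject* UnaryOpIC::Transition(Handle<Object> object) {
      UnaryOpStub stub(target()->extended_extra_ic_state());  // 1. recover recorded state
      stub.UpdateStatus(object);                              // 2. fold in the operand seen now
      set_target(*stub.GetCode(isolate()));                   // 3. install the specialized stub
      return stub.Result(object, isolate());                  // 4. compute this result in C++
    }
    // The next execution hits the new stub directly and no longer misses.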
static BinaryOpIC::TypeInfo TypeInfoFromValue(Handle<Object> value,
Token::Value op) {
v8::internal::TypeInfo type = v8::internal::TypeInfo::FromValue(value);
diff --git a/src/ic.h b/src/ic.h
index fcf0de5..7820d40 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -714,6 +714,14 @@
};
+class UnaryOpIC: public IC {
+ public:
+ explicit UnaryOpIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) { }
+
+ MUST_USE_RESULT MaybeObject* Transition(Handle<Object> object);
+};
+
+
// Type Recording BinaryOpIC, that records the types of the inputs and outputs.
class BinaryOpIC: public IC {
public:
diff --git a/src/icu_util.cc b/src/icu_util.cc
index b9bd65e..91f4527 100644
--- a/src/icu_util.cc
+++ b/src/icu_util.cc
@@ -27,7 +27,7 @@
#include "icu_util.h"
-#if defined(_WIN32) && defined(V8_I18N_SUPPORT)
+#if defined(_WIN32) && defined(ENABLE_I18N_SUPPORT)
#include <windows.h>
#include "unicode/putil.h"
@@ -42,7 +42,7 @@
namespace internal {
bool InitializeICU() {
-#if defined(_WIN32) && defined(V8_I18N_SUPPORT)
+#if defined(_WIN32) && defined(ENABLE_I18N_SUPPORT)
// We expect to find the ICU data module alongside the current module.
HMODULE module = LoadLibraryA(ICU_UTIL_DATA_SHARED_MODULE_NAME);
if (!module) return false;
diff --git a/src/isolate.cc b/src/isolate.cc
index 448c719..61f1e2d 100644
--- a/src/isolate.cc
+++ b/src/isolate.cc
@@ -1774,7 +1774,6 @@
inner_pointer_to_code_cache_(NULL),
write_iterator_(NULL),
global_handles_(NULL),
- eternal_handles_(NULL),
context_switcher_(NULL),
thread_manager_(NULL),
fp_stubs_generated_(false),
@@ -2053,8 +2052,6 @@
code_range_ = NULL;
delete global_handles_;
global_handles_ = NULL;
- delete eternal_handles_;
- eternal_handles_ = NULL;
delete string_stream_debug_object_cache_;
string_stream_debug_object_cache_ = NULL;
@@ -2186,7 +2183,6 @@
inner_pointer_to_code_cache_ = new InnerPointerToCodeCache(this);
write_iterator_ = new ConsStringIteratorOp();
global_handles_ = new GlobalHandles(this);
- eternal_handles_ = new EternalHandles();
bootstrapper_ = new Bootstrapper(this);
handle_scope_implementer_ = new HandleScopeImplementer(this);
stub_cache_ = new StubCache(this);
diff --git a/src/isolate.h b/src/isolate.h
index 74bfc29..c008317 100644
--- a/src/isolate.h
+++ b/src/isolate.h
@@ -922,8 +922,6 @@
GlobalHandles* global_handles() { return global_handles_; }
- EternalHandles* eternal_handles() { return eternal_handles_; }
-
ThreadManager* thread_manager() { return thread_manager_; }
ContextSwitcher* context_switcher() { return context_switcher_; }
@@ -1297,7 +1295,6 @@
InnerPointerToCodeCache* inner_pointer_to_code_cache_;
ConsStringIteratorOp* write_iterator_;
GlobalHandles* global_handles_;
- EternalHandles* eternal_handles_;
ContextSwitcher* context_switcher_;
ThreadManager* thread_manager_;
RuntimeState runtime_state_;
diff --git a/src/lithium.cc b/src/lithium.cc
index 790a218..e9c3531 100644
--- a/src/lithium.cc
+++ b/src/lithium.cc
@@ -425,7 +425,7 @@
int values = graph->GetMaximumValueID();
CompilationInfo* info = graph->info();
if (values > LUnallocated::kMaxVirtualRegisters) {
- info->set_bailout_reason(kNotEnoughVirtualRegistersForValues);
+ info->set_bailout_reason("not enough virtual registers for values");
return NULL;
}
LAllocator allocator(values, graph);
@@ -434,7 +434,7 @@
if (chunk == NULL) return NULL;
if (!allocator.Allocate(chunk)) {
- info->set_bailout_reason(kNotEnoughVirtualRegistersRegalloc);
+ info->set_bailout_reason("not enough virtual registers (regalloc)");
return NULL;
}
diff --git a/src/log.cc b/src/log.cc
index a1e5a67..b89c2bf 100644
--- a/src/log.cc
+++ b/src/log.cc
@@ -1644,6 +1644,7 @@
case Code::FUNCTION:
case Code::OPTIMIZED_FUNCTION:
return; // We log this later using LogCompiledFunctions.
+ case Code::UNARY_OP_IC: // fall through
case Code::BINARY_OP_IC: // fall through
case Code::COMPARE_IC: // fall through
case Code::COMPARE_NIL_IC: // fall through
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 0e84267..91da8a0 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -431,8 +431,8 @@
heap()->weak_embedded_maps_verification_enabled()) {
VerifyWeakEmbeddedMapsInOptimizedCode();
}
- if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
- VerifyOmittedMapChecks();
+ if (FLAG_collect_maps && FLAG_omit_prototype_checks_for_leaf_maps) {
+ VerifyOmittedPrototypeChecks();
}
#endif
@@ -503,13 +503,13 @@
}
-void MarkCompactCollector::VerifyOmittedMapChecks() {
+void MarkCompactCollector::VerifyOmittedPrototypeChecks() {
HeapObjectIterator iterator(heap()->map_space());
for (HeapObject* obj = iterator.Next();
obj != NULL;
obj = iterator.Next()) {
Map* map = Map::cast(obj);
- map->VerifyOmittedMapChecks();
+ map->VerifyOmittedPrototypeChecks();
}
}
#endif // VERIFY_HEAP
diff --git a/src/mark-compact.h b/src/mark-compact.h
index ee845a0..16e49e1 100644
--- a/src/mark-compact.h
+++ b/src/mark-compact.h
@@ -638,7 +638,7 @@
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
void VerifyWeakEmbeddedMapsInOptimizedCode();
- void VerifyOmittedMapChecks();
+ void VerifyOmittedPrototypeChecks();
#endif
// Sweep a single page from the given space conservatively.
diff --git a/src/messages.js b/src/messages.js
index 2debbf8..b586d24 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -228,18 +228,16 @@
}
}
}
- if (CanBeSafelyTreatedAsAnErrorObject(obj)) {
- return %_CallFunction(obj, ErrorToString);
- }
+ if (IsNativeErrorObject(obj)) return %_CallFunction(obj, ErrorToString);
return %_CallFunction(obj, ObjectToString);
}
-// To determine whether we can safely stringify an object using ErrorToString
-// without the risk of side-effects, we need to check whether the object is
-// either an instance of a native error type (via '%_ClassOf'), or has $Error
-// in its prototype chain and hasn't overwritten 'toString' with something
-// strange and unusual.
-function CanBeSafelyTreatedAsAnErrorObject(obj) {
+
+// To check if something is a native error we need to check the
+// concrete native error types. It is not sufficient to use instanceof
+// since it is possible to create an object that has Error.prototype on
+// its prototype chain. This is the case for DOMException for example.
+function IsNativeErrorObject(obj) {
switch (%_ClassOf(obj)) {
case 'Error':
case 'EvalError':
@@ -250,9 +248,7 @@
case 'URIError':
return true;
}
-
- var objToString = %GetDataProperty(obj, "toString");
- return obj instanceof $Error && objToString === ErrorToString;
+ return false;
}
@@ -261,7 +257,7 @@
// the error to string method. This is to avoid leaking error
// objects between script tags in a browser setting.
function ToStringCheckErrorObject(obj) {
- if (CanBeSafelyTreatedAsAnErrorObject(obj)) {
+ if (IsNativeErrorObject(obj)) {
return %_CallFunction(obj, ErrorToString);
} else {
return ToString(obj);
diff --git a/src/mips/assembler-mips.h b/src/mips/assembler-mips.h
index cb0896a..8d533b3 100644
--- a/src/mips/assembler-mips.h
+++ b/src/mips/assembler-mips.h
@@ -358,11 +358,6 @@
// Return true if this is a register operand.
INLINE(bool is_reg() const);
- inline int32_t immediate() const {
- ASSERT(!is_reg());
- return imm32_;
- }
-
Register rm() const { return rm_; }
private:
diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc
index d424cbc..3f5dca0 100644
--- a/src/mips/builtins-mips.cc
+++ b/src/mips/builtins-mips.cc
@@ -123,10 +123,10 @@
// Initial map for the builtin InternalArray functions should be maps.
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
__ And(t0, a2, Operand(kSmiTagMask));
- __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
+ __ Assert(ne, "Unexpected initial map for InternalArray function",
t0, Operand(zero_reg));
__ GetObjectType(a2, a3, t0);
- __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
+ __ Assert(eq, "Unexpected initial map for InternalArray function",
t0, Operand(MAP_TYPE));
}
@@ -153,10 +153,10 @@
// Initial map for the builtin Array functions should be maps.
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
__ And(t0, a2, Operand(kSmiTagMask));
- __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
+ __ Assert(ne, "Unexpected initial map for Array function (1)",
t0, Operand(zero_reg));
__ GetObjectType(a2, a3, t0);
- __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
+ __ Assert(eq, "Unexpected initial map for Array function (2)",
t0, Operand(MAP_TYPE));
}
@@ -185,7 +185,7 @@
Register function = a1;
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
- __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
+ __ Assert(eq, "Unexpected String function", function, Operand(a2));
}
// Load the first arguments in a0 and get rid of the rest.
@@ -231,10 +231,10 @@
__ LoadGlobalFunctionInitialMap(function, map, t0);
if (FLAG_debug_code) {
__ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
- __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
+ __ Assert(eq, "Unexpected string wrapper instance size",
t0, Operand(JSValue::kSize >> kPointerSizeLog2));
__ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
- __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
+ __ Assert(eq, "Unexpected unused properties of string wrapper",
t0, Operand(zero_reg));
}
__ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));
@@ -489,7 +489,7 @@
__ addu(a0, t5, t0);
// a0: offset of first field after pre-allocated fields
if (FLAG_debug_code) {
- __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
+ __ Assert(le, "Unexpected number of pre-allocated property fields.",
a0, Operand(t6));
}
__ InitializeFieldsWithFiller(t5, a0, t7);
@@ -522,7 +522,7 @@
// Done if no extra properties are to be allocated.
__ Branch(&allocated, eq, a3, Operand(zero_reg));
- __ Assert(greater_equal, kPropertyAllocationCountFailed,
+ __ Assert(greater_equal, "Property allocation count failed.",
a3, Operand(zero_reg));
// Scale the number of elements by pointer size and add the header for
@@ -569,7 +569,7 @@
__ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
} else if (FLAG_debug_code) {
__ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
- __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t8));
+ __ Assert(eq, "Undefined value not loaded.", t7, Operand(t8));
}
__ jmp(&entry);
__ bind(&loop);
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 8a03a9a..0e1b224 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -247,6 +247,17 @@
}
+void UnaryOpStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { a0 };
+ descriptor->register_param_count_ = 1;
+ descriptor->register_params_ = registers;
+ descriptor->deoptimization_handler_ =
+ FUNCTION_ADDR(UnaryOpIC_Miss);
+}
+
+
void StoreGlobalStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -509,7 +520,8 @@
Label after_sentinel;
__ JumpIfNotSmi(a3, &after_sentinel);
if (FLAG_debug_code) {
- __ Assert(eq, kExpected0AsASmiSentinel, a3, Operand(zero_reg));
+ const char* message = "Expected 0 as a Smi sentinel";
+ __ Assert(eq, message, a3, Operand(zero_reg));
}
__ lw(a3, GlobalObjectOperand());
__ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
@@ -667,7 +679,7 @@
Label* not_number) {
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- kHeapNumberMapRegisterClobbered);
+ "HeapNumberMap register clobbered.");
Label is_smi, done;
@@ -717,7 +729,7 @@
Label* not_number) {
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- kHeapNumberMapRegisterClobbered);
+ "HeapNumberMap register clobbered.");
Label done;
Label not_in_int32_range;
@@ -794,7 +806,7 @@
__ bind(&obj_is_not_smi);
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- kHeapNumberMapRegisterClobbered);
+ "HeapNumberMap register clobbered.");
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
// Load the number.
@@ -841,7 +853,7 @@
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- kHeapNumberMapRegisterClobbered);
+ "HeapNumberMap register clobbered.");
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, &maybe_undefined);
@@ -4267,12 +4279,12 @@
if (FLAG_debug_code) {
__ And(t0, regexp_data, Operand(kSmiTagMask));
__ Check(nz,
- kUnexpectedTypeForRegExpDataFixedArrayExpected,
+ "Unexpected type for RegExp data, FixedArray expected",
t0,
Operand(zero_reg));
__ GetObjectType(regexp_data, a0, a0);
__ Check(eq,
- kUnexpectedTypeForRegExpDataFixedArrayExpected,
+ "Unexpected type for RegExp data, FixedArray expected",
a0,
Operand(FIXED_ARRAY_TYPE));
}
@@ -4627,7 +4639,7 @@
// Sequential strings have already been ruled out.
__ And(at, a0, Operand(kIsIndirectStringMask));
__ Assert(eq,
- kExternalStringExpectedButNotFound,
+ "external string expected, but not found",
at,
Operand(zero_reg));
}
@@ -5008,7 +5020,7 @@
void StringCharCodeAtGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
+ __ Abort("Unexpected fallthrough to CharCodeAt slow case");
// Index is not a smi.
__ bind(&index_not_smi_);
@@ -5057,7 +5069,7 @@
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
+ __ Abort("Unexpected fallthrough from CharCodeAt slow case");
}
@@ -5094,7 +5106,7 @@
void StringCharFromCodeGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
+ __ Abort("Unexpected fallthrough to CharFromCode slow case");
__ bind(&slow_case_);
call_helper.BeforeCall(masm);
@@ -5105,7 +5117,7 @@
call_helper.AfterCall(masm);
__ Branch(&exit_);
- __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
+ __ Abort("Unexpected fallthrough from CharFromCode slow case");
}
@@ -5160,7 +5172,7 @@
// that it is.
__ And(scratch4, dest, Operand(kPointerAlignmentMask));
__ Check(eq,
- kDestinationOfCopyNotAligned,
+ "Destination of copy not aligned.",
scratch4,
Operand(zero_reg));
}
@@ -5360,7 +5372,7 @@
// Must be the hole (deleted entry).
if (FLAG_debug_code) {
__ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
- __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole,
+ __ Assert(eq, "oddball in string table is not undefined or the hole",
scratch, Operand(candidate));
}
__ jmp(&next_probe[i]);
@@ -6568,7 +6580,7 @@
// filled with kZapValue by the GC.
// Dereference the address and check for this.
__ lw(t0, MemOperand(t9));
- __ Assert(ne, kReceivedInvalidReturnAddress, t0,
+ __ Assert(ne, "Received invalid return address.", t0,
Operand(reinterpret_cast<uint32_t>(kZapValue)));
}
__ Jump(t9);
@@ -7319,7 +7331,7 @@
}
// If we reached this point there is a problem.
- __ Abort(kUnexpectedElementsKindInArrayConstructor);
+ __ Abort("Unexpected ElementsKind in array constructor");
}
@@ -7374,7 +7386,7 @@
}
// If we reached this point there is a problem.
- __ Abort(kUnexpectedElementsKindInArrayConstructor);
+ __ Abort("Unexpected ElementsKind in array constructor");
}
@@ -7435,10 +7447,10 @@
__ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ And(at, a3, Operand(kSmiTagMask));
- __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
+ __ Assert(ne, "Unexpected initial map for Array function",
at, Operand(zero_reg));
__ GetObjectType(a3, a3, t0);
- __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
+ __ Assert(eq, "Unexpected initial map for Array function",
t0, Operand(MAP_TYPE));
// We should either have undefined in a2 or a valid cell.
@@ -7447,7 +7459,7 @@
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
__ Branch(&okay_here, eq, a2, Operand(at));
__ lw(a3, FieldMemOperand(a2, 0));
- __ Assert(eq, kExpectedPropertyCellInRegisterA2,
+ __ Assert(eq, "Expected property cell in register a2",
a3, Operand(cell_map));
__ bind(&okay_here);
}
@@ -7547,10 +7559,10 @@
__ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ And(at, a3, Operand(kSmiTagMask));
- __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
+ __ Assert(ne, "Unexpected initial map for Array function",
at, Operand(zero_reg));
__ GetObjectType(a3, a3, t0);
- __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
+ __ Assert(eq, "Unexpected initial map for Array function",
t0, Operand(MAP_TYPE));
}
@@ -7567,7 +7579,7 @@
Label done;
__ Branch(&done, eq, a3, Operand(FAST_ELEMENTS));
__ Assert(
- eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray,
+ eq, "Invalid ElementsKind for InternalArray or InternalPackedArray",
a3, Operand(FAST_HOLEY_ELEMENTS));
__ bind(&done);
}
diff --git a/src/mips/codegen-mips.cc b/src/mips/codegen-mips.cc
index 10490e7..3f74154 100644
--- a/src/mips/codegen-mips.cc
+++ b/src/mips/codegen-mips.cc
@@ -289,7 +289,7 @@
__ SmiTag(t5);
__ Or(t5, t5, Operand(1));
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
- __ Assert(eq, kObjectFoundInSmiOnlyArray, at, Operand(t5));
+ __ Assert(eq, "object found in smi-only array", at, Operand(t5));
}
__ sw(t0, MemOperand(t3)); // mantissa
__ sw(t1, MemOperand(t3, kIntSize)); // exponent
@@ -489,7 +489,7 @@
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ And(at, result, Operand(kIsIndirectStringMask));
- __ Assert(eq, kExternalStringExpectedButNotFound,
+ __ Assert(eq, "external string expected, but not found",
at, Operand(zero_reg));
}
// Rule out short external strings.
diff --git a/src/mips/debug-mips.cc b/src/mips/debug-mips.cc
index 020228f..30cc4db 100644
--- a/src/mips/debug-mips.cc
+++ b/src/mips/debug-mips.cc
@@ -142,7 +142,8 @@
if ((non_object_regs & (1 << r)) != 0) {
if (FLAG_debug_code) {
__ And(at, reg, 0xc0000000);
- __ Assert(eq, kUnableToEncodeValueAsSmi, at, Operand(zero_reg));
+ __ Assert(
+ eq, "Unable to encode value as smi", at, Operand(zero_reg));
}
__ sll(reg, reg, kSmiTagSize);
}
@@ -324,12 +325,12 @@
void Debug::GeneratePlainReturnLiveEdit(MacroAssembler* masm) {
- masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnMips);
+ masm->Abort("LiveEdit frame dropping is not supported on mips");
}
void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
- masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnMips);
+ masm->Abort("LiveEdit frame dropping is not supported on mips");
}
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index b60502c..1084af0 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -786,10 +786,10 @@
// Check that we're not inside a with or catch context.
__ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
__ LoadRoot(t0, Heap::kWithContextMapRootIndex);
- __ Check(ne, kDeclarationInWithContext,
+ __ Check(ne, "Declaration in with context.",
a1, Operand(t0));
__ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
- __ Check(ne, kDeclarationInCatchContext,
+ __ Check(ne, "Declaration in catch context.",
a1, Operand(t0));
}
}
@@ -2529,7 +2529,7 @@
// Check for an uninitialized let binding.
__ lw(a2, location);
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
- __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
+ __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
}
// Perform the assignment.
__ sw(v0, location);
@@ -3492,21 +3492,21 @@
Register value,
uint32_t encoding_mask) {
__ And(at, index, Operand(kSmiTagMask));
- __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
+ __ Check(eq, "Non-smi index", at, Operand(zero_reg));
__ And(at, value, Operand(kSmiTagMask));
- __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
+ __ Check(eq, "Non-smi value", at, Operand(zero_reg));
__ lw(at, FieldMemOperand(string, String::kLengthOffset));
- __ Check(lt, kIndexIsTooLarge, index, Operand(at));
+ __ Check(lt, "Index is too large", index, Operand(at));
- __ Check(ge, kIndexIsNegative, index, Operand(zero_reg));
+ __ Check(ge, "Index is negative", index, Operand(zero_reg));
__ lw(at, FieldMemOperand(string, HeapObject::kMapOffset));
__ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset));
__ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask));
__ Subu(at, at, Operand(encoding_mask));
- __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
+ __ Check(eq, "Unexpected string type", at, Operand(zero_reg));
}
@@ -3881,7 +3881,7 @@
Handle<FixedArray> jsfunction_result_caches(
isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
- __ Abort(kAttemptToUseUndefinedCache);
+ __ Abort("Attempt to use undefined cache.");
__ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
context()->Plug(v0);
return;
@@ -4063,7 +4063,7 @@
// element: Current array element.
// elements_end: Array end.
if (generate_debug_code_) {
- __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin,
+ __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
array_length, Operand(zero_reg));
}
__ bind(&loop);
@@ -4382,12 +4382,35 @@
break;
}
+ case Token::SUB:
+ EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
+ break;
+
+ case Token::BIT_NOT:
+ EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
+ break;
+
default:
UNREACHABLE();
}
}
+void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
+ const char* comment) {
+ // TODO(svenpanne): Allowing format strings in Comment would be nice here...
+ Comment cmt(masm_, comment);
+ UnaryOpStub stub(expr->op());
+ // UnaryOpStub expects the argument to be in a0.
+ VisitForAccumulatorValue(expr->expression());
+ SetSourcePosition(expr->position());
+ __ mov(a0, result_register());
+ CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
+ expr->UnaryOperationFeedbackId());
+ context()->Plug(v0);
+}
+
+
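The emitted sequence pins down a simple register contract on this port (a gloss of
the lines above, not a documented ABI):

    // <evaluate expr>        // VisitForAccumulatorValue -> result in v0
    // mov  a0, v0            // the stub expects its operand in a0
    // call UnaryOpStub       // via CallIC, with the feedback id attached
    // result arrives in v0   // context()->Plug(v0)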
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index 34e601c..be5809d 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -91,7 +91,7 @@
}
-void LChunkBuilder::Abort(BailoutReason reason) {
+void LChunkBuilder::Abort(const char* reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -324,7 +324,7 @@
// end of the jump table.
if (!is_int16((masm()->pc_offset() / Assembler::kInstrSize) +
deopt_jump_table_.length() * 12)) {
- Abort(kGeneratedCodeIsTooLarge);
+ Abort("Generated code is too large");
}
if (deopt_jump_table_.length() > 0) {
@@ -411,7 +411,7 @@
ASSERT(constant->HasSmiValue());
__ li(scratch, Operand(Smi::FromInt(constant->Integer32Value())));
} else if (r.IsDouble()) {
- Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
+ Abort("EmitLoadRegister: Unsupported double immediate.");
} else {
ASSERT(r.IsTagged());
__ LoadObject(scratch, literal);
@@ -449,9 +449,9 @@
__ cvt_d_w(dbl_scratch, flt_scratch);
return dbl_scratch;
} else if (r.IsDouble()) {
- Abort(kUnsupportedDoubleImmediate);
+ Abort("unsupported double immediate");
} else if (r.IsTagged()) {
- Abort(kUnsupportedTaggedImmediate);
+ Abort("unsupported tagged immediate");
}
} else if (op->IsStackSlot() || op->IsArgument()) {
MemOperand mem_op = ToMemOperand(op);
@@ -520,14 +520,14 @@
ASSERT(constant->HasInteger32Value());
return Operand(constant->Integer32Value());
} else if (r.IsDouble()) {
- Abort(kToOperandUnsupportedDoubleImmediate);
+ Abort("ToOperand Unsupported double immediate.");
}
ASSERT(r.IsTagged());
return Operand(constant->handle());
} else if (op->IsRegister()) {
return Operand(ToRegister(op));
} else if (op->IsDoubleRegister()) {
- Abort(kToOperandIsDoubleRegisterUnimplemented);
+ Abort("ToOperand IsDoubleRegister unimplemented");
return Operand(0);
}
// Stack slots not implemented, use ToMemOperand instead.
@@ -748,7 +748,7 @@
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
- Abort(kBailoutWasNotPrepared);
+ Abort("bailout was not prepared");
return;
}
@@ -1057,16 +1057,20 @@
HValue* left = hmod->left();
HValue* right = hmod->right();
if (hmod->HasPowerOf2Divisor()) {
+ const Register scratch = scratch0();
const Register left_reg = ToRegister(instr->left());
+ ASSERT(!left_reg.is(scratch));
const Register result_reg = ToRegister(instr->result());
// Note: The code below even works when right contains kMinInt.
int32_t divisor = Abs(right->GetInteger32Constant());
+ __ mov(scratch, left_reg);
+
Label left_is_not_negative, done;
if (left->CanBeNegative()) {
- __ Branch(left_reg.is(result_reg) ? PROTECT : USE_DELAY_SLOT,
- &left_is_not_negative, ge, left_reg, Operand(zero_reg));
+ __ Branch(USE_DELAY_SLOT, &left_is_not_negative,
+ ge, left_reg, Operand(zero_reg));
__ subu(result_reg, zero_reg, left_reg);
__ And(result_reg, result_reg, divisor - 1);
if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
@@ -1077,13 +1081,15 @@
}
__ bind(&left_is_not_negative);
- __ And(result_reg, left_reg, divisor - 1);
+ __ And(result_reg, scratch, divisor - 1);
__ bind(&done);
} else if (hmod->fixed_right_arg().has_value) {
+ const Register scratch = scratch0();
const Register left_reg = ToRegister(instr->left());
const Register result_reg = ToRegister(instr->result());
- const Register right_reg = ToRegister(instr->right());
+
+ Register right_reg = EmitLoadRegister(instr->right(), scratch);
int32_t divisor = hmod->fixed_right_arg().value;
ASSERT(IsPowerOf2(divisor));
@@ -1093,8 +1099,8 @@
Label left_is_not_negative, done;
if (left->CanBeNegative()) {
- __ Branch(left_reg.is(result_reg) ? PROTECT : USE_DELAY_SLOT,
- &left_is_not_negative, ge, left_reg, Operand(zero_reg));
+ __ Branch(USE_DELAY_SLOT, &left_is_not_negative,
+ ge, left_reg, Operand(zero_reg));
__ subu(result_reg, zero_reg, left_reg);
__ And(result_reg, result_reg, divisor - 1);
if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
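The negative-left path implements the truncating-division identity
x mod 2^k == -((-x) & (2^k - 1)) for x < 0. A worked example with divisor 4 and
left -5 (the final negate-back happens after the lines shown in this hunk):

    // subu:         result = 0 - (-5)    = 5
    // And:          result = 5 & (4 - 1) = 1
    // negate back:  result = -1          // matches C semantics: -5 % 4 == -1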
@@ -1503,11 +1509,7 @@
__ Or(result, left, right);
break;
case Token::BIT_XOR:
- if (right_op->IsConstantOperand() && right.immediate() == int32_t(~0)) {
- __ Nor(result, zero_reg, left);
- } else {
- __ Xor(result, left, right);
- }
+ __ Xor(result, left, right);
break;
default:
UNREACHABLE();
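The case dropped here had folded "xor with all-ones" into a single nor, using the
identity x ^ ~0 == ~x. With LBitNotI reinstated by this rollback, bitwise not is
its own lithium instruction again (see DoBitNotI below), so BIT_XOR keeps only the
plain form:

    __ Xor(result, left, right);   // any x ^ ~0 now reaches DoBitNotI instead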
@@ -1768,7 +1770,7 @@
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ Subu(at, at, Operand(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
- __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
+ __ Check(eq, "Unexpected string type", at, Operand(zero_reg));
}
__ Addu(scratch,
@@ -1785,6 +1787,13 @@
}
+void LCodeGen::DoBitNotI(LBitNotI* instr) {
+ Register input = ToRegister(instr->value());
+ Register result = ToRegister(instr->result());
+ __ Nor(result, zero_reg, Operand(input));
+}
+
+
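MIPS has no dedicated bitwise-not instruction; nor against the zero register is the
standard idiom, since ~(0 | x) == ~x. In C terms:

    // nor(result, zero_reg, input) computes result = ~(0 | input) == ~input
    int32_t bit_not(int32_t x) { return ~(0 | x); }   // reference semantics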
void LCodeGen::DoThrow(LThrow* instr) {
Register input_reg = EmitLoadRegister(instr->value(), at);
__ push(input_reg);
@@ -3067,7 +3076,7 @@
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -3153,7 +3162,7 @@
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -3424,7 +3433,7 @@
void LCodeGen::DoPushArgument(LPushArgument* instr) {
LOperand* argument = instr->value();
if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
- Abort(kDoPushArgumentNotImplementedForDoubleType);
+ Abort("DoPushArgument not implemented for double type.");
} else {
Register argument_reg = EmitLoadRegister(argument, at);
__ push(argument_reg);
@@ -3643,7 +3652,7 @@
FPURegister input = ToDoubleRegister(instr->value());
FPURegister result = ToDoubleRegister(instr->result());
__ abs_d(result, input);
- } else if (r.IsSmiOrInteger32()) {
+ } else if (r.IsInteger32()) {
EmitIntegerMathAbs(instr);
} else {
// Representation is tagged.
@@ -4249,7 +4258,7 @@
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -4327,7 +4336,7 @@
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -5184,63 +5193,31 @@
}
-void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
- {
- PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
- __ push(object);
- CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
- __ StoreToSafepointRegisterSlot(v0, scratch0());
- }
- __ And(at, scratch0(), Operand(kSmiTagMask));
- DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
+void LCodeGen::DoCheckMapCommon(Register map_reg,
+ Handle<Map> map,
+ LEnvironment* env) {
+ Label success;
+ __ CompareMapAndBranch(map_reg, map, &success, eq, &success);
+ DeoptimizeIf(al, env);
+ __ bind(&success);
}
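DoCheckMapCommon passes &success as both targets of CompareMapAndBranch, so the
call reads as "jump to success iff the map matches"; the fall-through is an
unconditional deopt (al is "always" in this port's condition codes):

    //   if (map_reg == expected_map) goto success;
    //   deoptimize;                  // DeoptimizeIf(al, env)
    // success: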
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
- class DeferredCheckMaps: public LDeferredCode {
- public:
- DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
- : LDeferredCode(codegen), instr_(instr), object_(object) {
- SetExit(check_maps());
- }
- virtual void Generate() {
- codegen()->DoDeferredInstanceMigration(instr_, object_);
- }
- Label* check_maps() { return &check_maps_; }
- virtual LInstruction* instr() { return instr_; }
- private:
- LCheckMaps* instr_;
- Label check_maps_;
- Register object_;
- };
-
if (instr->hydrogen()->CanOmitMapChecks()) return;
Register map_reg = scratch0();
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
+ Label success;
SmallMapList* map_set = instr->hydrogen()->map_set();
__ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
-
- DeferredCheckMaps* deferred = NULL;
- if (instr->hydrogen()->has_migration_target()) {
- deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
- __ bind(deferred->check_maps());
- }
-
- Label success;
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMapAndBranch(map_reg, map, &success, eq, &success);
}
Handle<Map> map = map_set->last();
- __ CompareMapAndBranch(map_reg, map, &success, eq, &success);
- if (instr->hydrogen()->has_migration_target()) {
- __ Branch(deferred->entry());
- } else {
- DeoptimizeIf(al, instr->environment());
- }
-
+ DoCheckMapCommon(map_reg, map, instr->environment());
__ bind(&success);
}
@@ -5295,6 +5272,25 @@
}
+void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
+ if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
+
+ Register prototype_reg = ToRegister(instr->temp());
+ Register map_reg = ToRegister(instr->temp2());
+
+ ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
+ ZoneList<Handle<Map> >* maps = instr->maps();
+
+ ASSERT(prototypes->length() == maps->length());
+
+ for (int i = 0; i < prototypes->length(); i++) {
+ __ LoadHeapObject(prototype_reg, prototypes->at(i));
+ __ lw(map_reg, FieldMemOperand(prototype_reg, HeapObject::kMapOffset));
+ DoCheckMapCommon(map_reg, maps->at(i), instr->environment());
+ }
+}
+
+
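The loop emits one load-and-compare per (prototype, map) pair; in pseudo-code,
paraphrasing the body above:

    // for (i = 0; i < n; i++) {
    //   prototype_reg = prototypes[i];        // LoadHeapObject
    //   map_reg = prototype_reg->map();       // lw of HeapObject::kMapOffset
    //   if (map_reg != maps[i]) deoptimize;   // DoCheckMapCommon
    // }
    // A mismatch anywhere means an object on the chain changed shape, so the
    // optimized code's assumptions no longer hold.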
void LCodeGen::DoAllocate(LAllocate* instr) {
class DeferredAllocate: public LDeferredCode {
public:
diff --git a/src/mips/lithium-codegen-mips.h b/src/mips/lithium-codegen-mips.h
index 670c4cc..a485b67 100644
--- a/src/mips/lithium-codegen-mips.h
+++ b/src/mips/lithium-codegen-mips.h
@@ -114,7 +114,7 @@
DoubleRegister EmitLoadDoubleRegister(LOperand* op,
FloatRegister flt_scratch,
DoubleRegister dbl_scratch);
- int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
+ int ToRepresentation(LConstantOperand* op, const Representation& r) const;
int32_t ToInteger32(LConstantOperand* op) const;
Smi* ToSmi(LConstantOperand* op) const;
double ToDouble(LConstantOperand* op) const;
@@ -153,7 +153,7 @@
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
- void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
+ void DoCheckMapCommon(Register map_reg, Handle<Map> map, LEnvironment* env);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -213,7 +213,7 @@
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- void Abort(BailoutReason reason);
+ void Abort(const char* reason);
void FPRINTF_CHECKING Comment(const char* format, ...);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
diff --git a/src/mips/lithium-gap-resolver-mips.cc b/src/mips/lithium-gap-resolver-mips.cc
index 460e13b..771b228 100644
--- a/src/mips/lithium-gap-resolver-mips.cc
+++ b/src/mips/lithium-gap-resolver-mips.cc
@@ -258,7 +258,7 @@
} else {
__ LoadObject(dst, cgen_->ToHandle(constant_source));
}
- } else if (destination->IsDoubleRegister()) {
+ } else if (source->IsDoubleRegister()) {
DoubleRegister result = cgen_->ToDoubleRegister(destination);
double v = cgen_->ToDouble(constant_source);
__ Move(result, v);
diff --git a/src/mips/lithium-mips.cc b/src/mips/lithium-mips.cc
index 38ac19f..a5371f7 100644
--- a/src/mips/lithium-mips.cc
+++ b/src/mips/lithium-mips.cc
@@ -442,7 +442,7 @@
}
-void LCodeGen::Abort(BailoutReason reason) {
+void LCodeGen::Abort(const char* reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -650,7 +650,7 @@
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
- Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
+ Abort("Out of virtual registers while trying to allocate temp register.");
vreg = 0;
}
operand->set_virtual_register(vreg);
@@ -1327,6 +1327,15 @@
}
+LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
+ ASSERT(instr->value()->representation().IsInteger32());
+ ASSERT(instr->representation().IsInteger32());
+ if (instr->HasNoUses()) return NULL;
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new(zone()) LBitNotI(value));
+}
+
+
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::DIV, instr);
@@ -1744,6 +1753,17 @@
}
+LInstruction* LChunkBuilder::DoNumericConstraint(HNumericConstraint* instr) {
+ return NULL;
+}
+
+
+LInstruction* LChunkBuilder::DoInductionVariableAnnotation(
+ HInductionVariableAnnotation* instr) {
+ return NULL;
+}
+
+
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
LOperand* value = UseRegisterOrConstantAtStart(instr->index());
LOperand* length = UseRegister(instr->length());
@@ -1917,6 +1937,19 @@
}
+LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
+ LUnallocated* temp1 = NULL;
+ LOperand* temp2 = NULL;
+ if (!instr->CanOmitPrototypeChecks()) {
+ temp1 = TempRegister();
+ temp2 = TempRegister();
+ }
+ LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
+ if (instr->CanOmitPrototypeChecks()) return result;
+ return AssignEnvironment(result);
+}
+
+
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
return AssignEnvironment(new(zone()) LCheckFunction(value));
@@ -1925,16 +1958,10 @@
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) {
- value = UseRegisterAtStart(instr->value());
- if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
- }
- LCheckMaps* result = new(zone()) LCheckMaps(value);
- if (!instr->CanOmitMapChecks()) {
- AssignEnvironment(result);
- if (instr->has_migration_target()) return AssignPointerMap(result);
- }
- return result;
+ if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
+ LInstruction* result = new(zone()) LCheckMaps(value);
+ if (instr->CanOmitMapChecks()) return result;
+ return AssignEnvironment(result);
}
@@ -2218,7 +2245,7 @@
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool is_in_object = instr->access().IsInobject();
bool needs_write_barrier = instr->NeedsWriteBarrier();
- bool needs_write_barrier_for_map = instr->has_transition() &&
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
instr->NeedsWriteBarrierForMap();
LOperand* obj;
@@ -2338,7 +2365,7 @@
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
- Abort(kTooManySpillSlotsNeededForOSR);
+ Abort("Too many spill slots needed for OSR");
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
diff --git a/src/mips/lithium-mips.h b/src/mips/lithium-mips.h
index a21c323..44c909e 100644
--- a/src/mips/lithium-mips.h
+++ b/src/mips/lithium-mips.h
@@ -50,6 +50,7 @@
V(ArithmeticD) \
V(ArithmeticT) \
V(BitI) \
+ V(BitNotI) \
V(BoundsCheck) \
V(Branch) \
V(CallConstantFunction) \
@@ -67,6 +68,7 @@
V(CheckMaps) \
V(CheckMapValue) \
V(CheckNonSmi) \
+ V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampDToUint8) \
V(ClampIToUint8) \
@@ -1354,6 +1356,18 @@
};
+class LBitNotI: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LBitNotI(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
+};
+
+
class LAddI: public LTemplateInstruction<1, 2, 0> {
public:
LAddI(LOperand* left, LOperand* right) {
@@ -2114,7 +2128,7 @@
virtual void PrintDataTo(StringStream* stream);
- Handle<Map> transition() const { return hydrogen()->transition_map(); }
+ Handle<Map> transition() const { return hydrogen()->transition(); }
Representation representation() const {
return hydrogen()->field_representation();
}
@@ -2310,6 +2324,26 @@
};
+class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 2> {
+ public:
+ LCheckPrototypeMaps(LOperand* temp, LOperand* temp2) {
+ temps_[0] = temp;
+ temps_[1] = temp2;
+ }
+
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
+
+ ZoneList<Handle<JSObject> >* prototypes() const {
+ return hydrogen()->prototypes();
+ }
+ ZoneList<Handle<Map> >* maps() const { return hydrogen()->maps(); }
+};
+
+
class LCheckSmi: public LTemplateInstruction<1, 1, 0> {
public:
explicit LCheckSmi(LOperand* value) {
@@ -2608,7 +2642,7 @@
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(BailoutReason reason);
+ void Abort(const char* reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index a7ec713..ea08a55 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -256,7 +256,7 @@
if (emit_debug_code()) {
lw(at, MemOperand(address));
Assert(
- eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value));
+ eq, "Wrong address or value passed to RecordWrite", at, Operand(value));
}
Label done;
@@ -358,7 +358,7 @@
lw(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
// In debug mode, make sure the lexical context is set.
#ifdef DEBUG
- Check(ne, kWeShouldNotHaveAnEmptyLexicalContext,
+ Check(ne, "we should not have an empty lexical context",
scratch, Operand(zero_reg));
#endif
@@ -374,7 +374,7 @@
// Read the first word and compare to the native_context_map.
lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
LoadRoot(at, Heap::kNativeContextMapRootIndex);
- Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
+ Check(eq, "JSGlobalObject::native_context should be a native context.",
holder_reg, Operand(at));
pop(holder_reg); // Restore holder.
}
@@ -388,12 +388,12 @@
push(holder_reg); // Temporarily save holder on the stack.
mov(holder_reg, at); // Move at to its holding place.
LoadRoot(at, Heap::kNullValueRootIndex);
- Check(ne, kJSGlobalProxyContextShouldNotBeNull,
+ Check(ne, "JSGlobalProxy::context() should not be null.",
holder_reg, Operand(at));
lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
LoadRoot(at, Heap::kNativeContextMapRootIndex);
- Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
+ Check(eq, "JSGlobalObject::native_context should be a native context.",
holder_reg, Operand(at));
// Restore at is not needed. at is reloaded below.
pop(holder_reg); // Restore holder.
@@ -2938,7 +2938,7 @@
// immediately below so this use of t9 does not cause difference with
// respect to register content between debug and release mode.
lw(t9, MemOperand(topaddr));
- Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
+ Check(eq, "Unexpected allocation top", result, Operand(t9));
}
// Load allocation limit into t9. Result already contains allocation top.
lw(t9, MemOperand(topaddr, limit - top));
@@ -3008,7 +3008,7 @@
// immediately below so this use of t9 does not cause difference with
// respect to register content between debug and release mode.
lw(t9, MemOperand(topaddr));
- Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
+ Check(eq, "Unexpected allocation top", result, Operand(t9));
}
// Load allocation limit into t9. Result already contains allocation top.
lw(t9, MemOperand(topaddr, limit - top));
@@ -3028,7 +3028,7 @@
// Update allocation top. result temporarily holds the new top.
if (emit_debug_code()) {
And(t9, scratch2, Operand(kObjectAlignmentMask));
- Check(eq, kUnalignedAllocationInNewSpace, t9, Operand(zero_reg));
+ Check(eq, "Unaligned allocation in new space", t9, Operand(zero_reg));
}
sw(scratch2, MemOperand(topaddr));
@@ -3050,7 +3050,7 @@
// Check that the object un-allocated is below the current top.
li(scratch, Operand(new_space_allocation_top));
lw(scratch, MemOperand(scratch));
- Check(less, kUndoAllocationOfNonAllocatedMemory,
+ Check(less, "Undo allocation of non allocated memory",
object, Operand(scratch));
#endif
// Write the address of the object to un-allocate as the current top.
@@ -3303,7 +3303,7 @@
bind(&word_loop);
if (emit_debug_code()) {
And(scratch, src, kPointerSize - 1);
- Assert(eq, kExpectingAlignmentForCopyBytes,
+ Assert(eq, "Expecting alignment for CopyBytes",
scratch, Operand(zero_reg));
}
Branch(&byte_loop, lt, length, Operand(kPointerSize));
@@ -4029,7 +4029,7 @@
sw(s0, MemOperand(s3, kNextOffset));
if (emit_debug_code()) {
lw(a1, MemOperand(s3, kLevelOffset));
- Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
+ Check(eq, "Unexpected level after return from api call", a1, Operand(s2));
}
Subu(s2, s2, Operand(1));
sw(s2, MemOperand(s3, kLevelOffset));
@@ -4383,10 +4383,10 @@
// -----------------------------------------------------------------------------
// Debugging.
-void MacroAssembler::Assert(Condition cc, BailoutReason reason,
+void MacroAssembler::Assert(Condition cc, const char* msg,
Register rs, Operand rt) {
if (emit_debug_code())
- Check(cc, reason, rs, rt);
+ Check(cc, msg, rs, rt);
}
@@ -4394,7 +4394,7 @@
Heap::RootListIndex index) {
if (emit_debug_code()) {
LoadRoot(at, index);
- Check(eq, kRegisterDidNotMatchExpectedRoot, reg, Operand(at));
+ Check(eq, "Register did not match expected root", reg, Operand(at));
}
}
@@ -4411,24 +4411,24 @@
Branch(&ok, eq, elements, Operand(at));
LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
Branch(&ok, eq, elements, Operand(at));
- Abort(kJSObjectWithFastElementsMapHasSlowElements);
+ Abort("JSObject with fast elements map has slow elements");
bind(&ok);
pop(elements);
}
}
-void MacroAssembler::Check(Condition cc, BailoutReason reason,
+void MacroAssembler::Check(Condition cc, const char* msg,
Register rs, Operand rt) {
Label L;
Branch(&L, cc, rs, rt);
- Abort(reason);
+ Abort(msg);
// Will not return here.
bind(&L);
}
-void MacroAssembler::Abort(BailoutReason reason) {
+void MacroAssembler::Abort(const char* msg) {
Label abort_start;
bind(&abort_start);
// We want to pass the msg string like a smi to avoid GC
@@ -4436,7 +4436,6 @@
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
- const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
@@ -4580,7 +4579,7 @@
CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
Branch(&ok);
bind(&fail);
- Abort(kGlobalFunctionsMustHaveInitialMap);
+ Abort("Global functions must have initial map");
bind(&ok);
}
}
@@ -4863,7 +4862,7 @@
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
andi(at, object, kSmiTagMask);
- Check(ne, kOperandIsASmi, at, Operand(zero_reg));
+ Check(ne, "Operand is a smi", at, Operand(zero_reg));
}
}
@@ -4872,7 +4871,7 @@
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
andi(at, object, kSmiTagMask);
- Check(eq, kOperandIsASmi, at, Operand(zero_reg));
+ Check(eq, "Operand is a smi", at, Operand(zero_reg));
}
}
@@ -4881,11 +4880,11 @@
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
And(t0, object, Operand(kSmiTagMask));
- Check(ne, kOperandIsASmiAndNotAString, t0, Operand(zero_reg));
+ Check(ne, "Operand is a smi and not a string", t0, Operand(zero_reg));
push(object);
lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
- Check(lo, kOperandIsNotAString, object, Operand(FIRST_NONSTRING_TYPE));
+ Check(lo, "Operand is not a string", object, Operand(FIRST_NONSTRING_TYPE));
pop(object);
}
}
@@ -4895,11 +4894,11 @@
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
And(t0, object, Operand(kSmiTagMask));
- Check(ne, kOperandIsASmiAndNotAName, t0, Operand(zero_reg));
+ Check(ne, "Operand is a smi and not a name", t0, Operand(zero_reg));
push(object);
lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
- Check(le, kOperandIsNotAName, object, Operand(LAST_NAME_TYPE));
+ Check(le, "Operand is not a name", object, Operand(LAST_NAME_TYPE));
pop(object);
}
}
@@ -4907,11 +4906,11 @@
void MacroAssembler::AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
- BailoutReason reason) {
+ const char* message) {
if (emit_debug_code()) {
ASSERT(!src.is(at));
LoadRoot(at, root_value_index);
- Check(eq, reason, src, Operand(at));
+ Check(eq, message, src, Operand(at));
}
}
@@ -5128,7 +5127,7 @@
// At this point scratch is a lui(at, ...) instruction.
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
- Check(eq, kTheInstructionToPatchShouldBeALui,
+ Check(eq, "The instruction to patch should be a lui.",
scratch, Operand(LUI));
lw(scratch, MemOperand(li_location));
}
@@ -5140,7 +5139,7 @@
// scratch is now ori(at, ...).
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
- Check(eq, kTheInstructionToPatchShouldBeAnOri,
+ Check(eq, "The instruction to patch should be an ori.",
scratch, Operand(ORI));
lw(scratch, MemOperand(li_location, kInstrSize));
}
@@ -5157,7 +5156,7 @@
lw(value, MemOperand(li_location));
if (emit_debug_code()) {
And(value, value, kOpcodeMask);
- Check(eq, kTheInstructionShouldBeALui,
+ Check(eq, "The instruction should be a lui.",
value, Operand(LUI));
lw(value, MemOperand(li_location));
}
@@ -5168,7 +5167,7 @@
lw(scratch, MemOperand(li_location, kInstrSize));
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
- Check(eq, kTheInstructionShouldBeAnOri,
+ Check(eq, "The instruction should be an ori.",
scratch, Operand(ORI));
lw(scratch, MemOperand(li_location, kInstrSize));
}
diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h
index ac37db2..bc3e7c4 100644
--- a/src/mips/macro-assembler-mips.h
+++ b/src/mips/macro-assembler-mips.h
@@ -627,11 +627,11 @@
void MultiPushFPU(RegList regs);
void MultiPushReversedFPU(RegList regs);
+ // Lower case push() for compatibility with arch-independent code.
void push(Register src) {
Addu(sp, sp, Operand(-kPointerSize));
sw(src, MemOperand(sp, 0));
}
- void Push(Register src) { push(src); }
// Push a handle.
void Push(Handle<Object> handle);
@@ -676,11 +676,11 @@
void MultiPopFPU(RegList regs);
void MultiPopReversedFPU(RegList regs);
+ // Lower case pop() for compatibility with arch-independent code.
void pop(Register dst) {
lw(dst, MemOperand(sp, 0));
Addu(sp, sp, Operand(kPointerSize));
}
- void Pop(Register dst) { pop(dst); }
// Pop two registers. Pops rightmost register first (from lower address).
void Pop(Register src1, Register src2) {
@@ -1286,15 +1286,15 @@
// Calls Abort(msg) if the condition cc is not satisfied.
// Use --debug_code to enable.
- void Assert(Condition cc, BailoutReason reason, Register rs, Operand rt);
+ void Assert(Condition cc, const char* msg, Register rs, Operand rt);
void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
void AssertFastElements(Register elements);
// Like Assert(), but always enabled.
- void Check(Condition cc, BailoutReason reason, Register rs, Operand rt);
+ void Check(Condition cc, const char* msg, Register rs, Operand rt);
// Print a message to stdout and abort execution.
- void Abort(BailoutReason msg);
+ void Abort(const char* msg);
// Verify restrictions about code generated in stubs.
void set_generating_stub(bool value) { generating_stub_ = value; }
@@ -1378,7 +1378,7 @@
// enabled via --debug-code.
void AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
- BailoutReason reason);
+ const char* message);
// ---------------------------------------------------------------------------
// HeapNumber utilities.
diff --git a/src/object-observe.js b/src/object-observe.js
index f5e0d9d..a5c12bf 100644
--- a/src/object-observe.js
+++ b/src/object-observe.js
@@ -394,10 +394,7 @@
if (ObjectIsFrozen(object)) return null;
var objectInfo = objectInfoMap.get(object);
- if (IS_UNDEFINED(objectInfo)) {
- objectInfo = CreateObjectInfo(object);
- %SetIsObserved(object);
- }
+ if (IS_UNDEFINED(objectInfo)) objectInfo = CreateObjectInfo(object);
if (IS_NULL(objectInfo.notifier)) {
objectInfo.notifier = { __proto__: notifierPrototype };
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index e0cb8c9..395f95c 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -366,12 +366,9 @@
}
-void Map::VerifyOmittedMapChecks() {
- if (!FLAG_omit_map_checks_for_leaf_maps) return;
- if (!is_stable() ||
- is_deprecated() ||
- HasTransitionArray() ||
- is_dictionary_map()) {
+void Map::VerifyOmittedPrototypeChecks() {
+ if (!FLAG_omit_prototype_checks_for_leaf_maps) return;
+ if (HasTransitionArray() || is_dictionary_map()) {
CHECK_EQ(0, dependent_code()->number_of_entries(
DependentCode::kPrototypeCheckGroup));
}
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 1694757..128dc6b 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -3617,17 +3617,6 @@
}
-void Map::set_migration_target(bool value) {
- set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
-}
-
-
-bool Map::is_migration_target() {
- if (!FLAG_track_fields) return false;
- return IsMigrationTarget::decode(bit_field3());
-}
-
-
void Map::freeze() {
set_bit_field3(IsFrozen::update(bit_field3(), true));
}
@@ -3688,6 +3677,11 @@
}
+bool Map::CanOmitPrototypeChecks() {
+ return is_stable() && FLAG_omit_prototype_checks_for_leaf_maps;
+}
+
+
bool Map::CanOmitMapChecks() {
return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
@@ -3822,6 +3816,7 @@
int Code::major_key() {
ASSERT(kind() == STUB ||
+ kind() == UNARY_OP_IC ||
kind() == BINARY_OP_IC ||
kind() == COMPARE_IC ||
kind() == COMPARE_NIL_IC ||
@@ -3836,6 +3831,7 @@
void Code::set_major_key(int major) {
ASSERT(kind() == STUB ||
+ kind() == UNARY_OP_IC ||
kind() == BINARY_OP_IC ||
kind() == COMPARE_IC ||
kind() == COMPARE_NIL_IC ||
@@ -4025,6 +4021,21 @@
}
+byte Code::unary_op_type() {
+ ASSERT(is_unary_op_stub());
+ return UnaryOpTypeField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
+}
+
+
+void Code::set_unary_op_type(byte value) {
+ ASSERT(is_unary_op_stub());
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = UnaryOpTypeField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
+}
+
+
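Note: unary_op_type() and set_unary_op_type() lean on the BitField helper declared later in objects.h. A minimal reimplementation of that pack/unpack idiom, written from scratch here purely for illustration, looks like this:

#include <cstdint>

template <typename T, int kShift, int kSize>
struct BitFieldSketch {
  static const uint32_t kMask = ((1u << kSize) - 1) << kShift;
  // Replace the field's bits inside an existing flags word.
  static uint32_t update(uint32_t previous, T value) {
    return (previous & ~kMask) | (static_cast<uint32_t>(value) << kShift);
  }
  // Extract the field's bits from a flags word.
  static T decode(uint32_t value) {
    return static_cast<T>((value & kMask) >> kShift);
  }
};

// e.g. BitFieldSketch<int, 24, 3> mirrors the UnaryOpTypeField layout
// (3 bits starting right after the 24 stack-slot bits).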
byte Code::to_boolean_state() {
return extended_extra_ic_state();
}
@@ -4209,20 +4220,7 @@
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
-
-
-void Map::set_bit_field3(uint32_t bits) {
- // Ensure the upper 2 bits have the same value by sign extending it. This is
- // necessary to be able to use the 31st bit.
- int value = bits << 1;
- WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
-}
-
-
-uint32_t Map::bit_field3() {
- Object* value = READ_FIELD(this, kBitField3Offset);
- return Smi::cast(value)->value();
-}
+SMI_ACCESSORS(Map, bit_field3, kBitField3Offset)
void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
diff --git a/src/objects-visiting.h b/src/objects-visiting.h
index 2175737..32e457b 100644
--- a/src/objects-visiting.h
+++ b/src/objects-visiting.h
@@ -141,7 +141,7 @@
(base == kVisitJSObject));
ASSERT(IsAligned(object_size, kPointerSize));
ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
- ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
+ ASSERT(object_size < Page::kMaxNonCodeHeapObjectSize);
const VisitorId specialization = static_cast<VisitorId>(
base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);
diff --git a/src/objects.cc b/src/objects.cc
index 734bf40..2e9badb 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -2719,7 +2719,6 @@
Handle<Map>(new_map);
return maybe_map;
}
- new_map->set_migration_target(true);
}
new_map->set_owns_descriptors(true);
@@ -6518,7 +6517,6 @@
result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
result->set_dictionary_map(true);
- result->set_migration_target(false);
#ifdef VERIFY_HEAP
if (FLAG_verify_heap && result->is_shared()) {
@@ -9806,7 +9804,7 @@
}
-void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
+void SharedFunctionInfo::DisableOptimization(const char* reason) {
// Disable optimization for the shared function info and mark the
// code as non-optimizable. The marker on the shared function info
// is there because we flush non-optimized code thereby losing the
@@ -9824,7 +9822,7 @@
if (FLAG_trace_opt) {
PrintF("[disabled optimization for ");
ShortPrint();
- PrintF(", reason: %s]\n", GetBailoutReason(reason));
+ PrintF(", reason: %s]\n", reason);
}
}
@@ -10795,17 +10793,18 @@
// If there is no back edge table, the "table start" will be at or after
// (due to alignment) the end of the instruction stream.
if (static_cast<int>(offset) < instruction_size()) {
- FullCodeGenerator::BackEdgeTableIterator back_edges(this);
-
- PrintF(out, "Back edges (size = %u)\n", back_edges.table_length());
+ Address back_edge_cursor = instruction_start() + offset;
+ uint32_t table_length = Memory::uint32_at(back_edge_cursor);
+ PrintF(out, "Back edges (size = %u)\n", table_length);
PrintF(out, "ast_id pc_offset loop_depth\n");
-
- for ( ; !back_edges.Done(); back_edges.Next()) {
- PrintF(out, "%6d %9u %10u\n", back_edges.ast_id().ToInt(),
- back_edges.pc_offset(),
- back_edges.loop_depth());
+ for (uint32_t i = 0; i < table_length; ++i) {
+ uint32_t ast_id = Memory::uint32_at(back_edge_cursor);
+ uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
+ uint32_t loop_depth = Memory::uint32_at(back_edge_cursor +
+ 2 * kIntSize);
+ PrintF(out, "%6u %9u %10u\n", ast_id, pc_offset, loop_depth);
+ back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
-
PrintF(out, "\n");
}
#ifdef OBJECT_PRINT
@@ -15965,15 +15964,4 @@
}
-const char* GetBailoutReason(BailoutReason reason) {
- ASSERT(reason < kLastErrorMessage);
-#define ERROR_MESSAGES_TEXTS(C, T) T,
- static const char* error_messages_[] = {
- ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
- };
-#undef ERROR_MESSAGES_TEXTS
- return error_messages_[reason];
-}
-
-
} } // namespace v8::internal
diff --git a/src/objects.h b/src/objects.h
index b2dc181..d370c32 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -1046,287 +1046,7 @@
V(AccessCheckNeeded) \
V(Cell) \
V(PropertyCell) \
- V(ObjectHashTable)
-
-
-#define ERROR_MESSAGES_LIST(V) \
- V(kNoReason, "no reason") \
- \
- V(k32BitValueInRegisterIsNotZeroExtended, \
- "32 bit value in register is not zero-extended") \
- V(kAlignmentMarkerExpected, "alignment marker expected") \
- V(kAllocationIsNotDoubleAligned, "Allocation is not double aligned") \
- V(kAPICallReturnedInvalidObject, "API call returned invalid object") \
- V(kArgumentsObjectValueInATestContext, \
- "arguments object value in a test context") \
- V(kArrayBoilerplateCreationFailed, "array boilerplate creation failed") \
- V(kArrayIndexConstantValueTooBig, "array index constant value too big") \
- V(kAssignmentToArguments, "assignment to arguments") \
- V(kAssignmentToLetVariableBeforeInitialization, \
- "assignment to let variable before initialization") \
- V(kAssignmentToLOOKUPVariable, "assignment to LOOKUP variable") \
- V(kAssignmentToParameterFunctionUsesArgumentsObject, \
- "assignment to parameter, function uses arguments object") \
- V(kAssignmentToParameterInArgumentsObject, \
- "assignment to parameter in arguments object") \
- V(kAttemptToUseUndefinedCache, "Attempt to use undefined cache") \
- V(kBadValueContextForArgumentsObjectValue, \
- "bad value context for arguments object value") \
- V(kBadValueContextForArgumentsValue, \
- "bad value context for arguments value") \
- V(kBailedOutDueToDependentMap, "bailed out due to dependent map") \
- V(kBailoutWasNotPrepared, "bailout was not prepared") \
- V(kBinaryStubGenerateFloatingPointCode, \
- "BinaryStub_GenerateFloatingPointCode") \
- V(kBothRegistersWereSmisInSelectNonSmi, \
- "Both registers were smis in SelectNonSmi") \
- V(kCallToAJavaScriptRuntimeFunction, \
- "call to a JavaScript runtime function") \
- V(kCannotTranslatePositionInChangedArea, \
- "Cannot translate position in changed area") \
- V(kCodeGenerationFailed, "code generation failed") \
- V(kCodeObjectNotProperlyPatched, "code object not properly patched") \
- V(kCompoundAssignmentToLookupSlot, "compound assignment to lookup slot") \
- V(kContextAllocatedArguments, "context-allocated arguments") \
- V(kDebuggerIsActive, "debugger is active") \
- V(kDebuggerStatement, "DebuggerStatement") \
- V(kDeclarationInCatchContext, "Declaration in catch context") \
- V(kDeclarationInWithContext, "Declaration in with context") \
- V(kDefaultNaNModeNotSet, "Default NaN mode not set") \
- V(kDeleteWithGlobalVariable, "delete with global variable") \
- V(kDeleteWithNonGlobalVariable, "delete with non-global variable") \
- V(kDestinationOfCopyNotAligned, "Destination of copy not aligned") \
- V(kDontDeleteCellsCannotContainTheHole, \
- "DontDelete cells can't contain the hole") \
- V(kDoPushArgumentNotImplementedForDoubleType, \
- "DoPushArgument not implemented for double type") \
- V(kEmitLoadRegisterUnsupportedDoubleImmediate, \
- "EmitLoadRegister: Unsupported double immediate") \
- V(kEval, "eval") \
- V(kExpected0AsASmiSentinel, "Expected 0 as a Smi sentinel") \
- V(kExpectedAlignmentMarker, "expected alignment marker") \
- V(kExpectedPropertyCellInRegisterA2, \
- "Expected property cell in register a2") \
- V(kExpectedPropertyCellInRegisterEbx, \
- "Expected property cell in register ebx") \
- V(kExpectedPropertyCellInRegisterRbx, \
- "Expected property cell in register rbx") \
- V(kExpectingAlignmentForCopyBytes, \
- "Expecting alignment for CopyBytes") \
- V(kExternalStringExpectedButNotFound, \
- "external string expected, but not found") \
- V(kFailedBailedOutLastTime, "failed/bailed out last time") \
- V(kForInStatementIsNotFastCase, "ForInStatement is not fast case") \
- V(kForInStatementOptimizationIsDisabled, \
- "ForInStatement optimization is disabled") \
- V(kForInStatementWithNonLocalEachVariable, \
- "ForInStatement with non-local each variable") \
- V(kForOfStatement, "ForOfStatement") \
- V(kFrameIsExpectedToBeAligned, "frame is expected to be aligned") \
- V(kFunctionCallsEval, "function calls eval") \
- V(kFunctionIsAGenerator, "function is a generator") \
- V(kFunctionWithIllegalRedeclaration, "function with illegal redeclaration") \
- V(kGeneratedCodeIsTooLarge, "Generated code is too large") \
- V(kGeneratorFailedToResume, "Generator failed to resume") \
- V(kGenerator, "generator") \
- V(kGlobalFunctionsMustHaveInitialMap, \
- "Global functions must have initial map") \
- V(kHeapNumberMapRegisterClobbered, "HeapNumberMap register clobbered") \
- V(kImproperObjectOnPrototypeChainForStore, \
- "improper object on prototype chain for store") \
- V(kIndexIsNegative, "Index is negative") \
- V(kIndexIsTooLarge, "Index is too large") \
- V(kInlinedRuntimeFunctionClassOf, "inlined runtime function: ClassOf") \
- V(kInlinedRuntimeFunctionFastAsciiArrayJoin, \
- "inlined runtime function: FastAsciiArrayJoin") \
- V(kInlinedRuntimeFunctionGeneratorNext, \
- "inlined runtime function: GeneratorNext") \
- V(kInlinedRuntimeFunctionGeneratorThrow, \
- "inlined runtime function: GeneratorThrow") \
- V(kInlinedRuntimeFunctionGetFromCache, \
- "inlined runtime function: GetFromCache") \
- V(kInlinedRuntimeFunctionIsNonNegativeSmi, \
- "inlined runtime function: IsNonNegativeSmi") \
- V(kInlinedRuntimeFunctionIsRegExpEquivalent, \
- "inlined runtime function: IsRegExpEquivalent") \
- V(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf, \
- "inlined runtime function: IsStringWrapperSafeForDefaultValueOf") \
- V(kInliningBailedOut, "inlining bailed out") \
- V(kInputGPRIsExpectedToHaveUpper32Cleared, \
- "input GPR is expected to have upper32 cleared") \
- V(kInstanceofStubUnexpectedCallSiteCacheCheck, \
- "InstanceofStub unexpected call site cache (check)") \
- V(kInstanceofStubUnexpectedCallSiteCacheCmp1, \
- "InstanceofStub unexpected call site cache (cmp 1)") \
- V(kInstanceofStubUnexpectedCallSiteCacheCmp2, \
- "InstanceofStub unexpected call site cache (cmp 2)") \
- V(kInstanceofStubUnexpectedCallSiteCacheMov, \
- "InstanceofStub unexpected call site cache (mov)") \
- V(kInteger32ToSmiFieldWritingToNonSmiLocation, \
- "Integer32ToSmiField writing to non-smi location") \
- V(kInvalidCaptureReferenced, "Invalid capture referenced") \
- V(kInvalidElementsKindForInternalArrayOrInternalPackedArray, \
- "Invalid ElementsKind for InternalArray or InternalPackedArray") \
- V(kInvalidHandleScopeLevel, "Invalid HandleScope level") \
- V(kInvalidLeftHandSideInAssignment, "invalid left-hand side in assignment") \
- V(kInvalidLhsInCompoundAssignment, "invalid lhs in compound assignment") \
- V(kInvalidLhsInCountOperation, "invalid lhs in count operation") \
- V(kInvalidMinLength, "Invalid min_length") \
- V(kJSGlobalObjectNativeContextShouldBeANativeContext, \
- "JSGlobalObject::native_context should be a native context") \
- V(kJSGlobalProxyContextShouldNotBeNull, \
- "JSGlobalProxy::context() should not be null") \
- V(kJSObjectWithFastElementsMapHasSlowElements, \
- "JSObject with fast elements map has slow elements") \
- V(kLetBindingReInitialization, "Let binding re-initialization") \
- V(kLiveBytesCountOverflowChunkSize, "Live Bytes Count overflow chunk size") \
- V(kLiveEditFrameDroppingIsNotSupportedOnArm, \
- "LiveEdit frame dropping is not supported on arm") \
- V(kLiveEditFrameDroppingIsNotSupportedOnMips, \
- "LiveEdit frame dropping is not supported on mips") \
- V(kLiveEdit, "LiveEdit") \
- V(kLookupVariableInCountOperation, \
- "lookup variable in count operation") \
- V(kMapIsNoLongerInEax, "Map is no longer in eax") \
- V(kNoCasesLeft, "no cases left") \
- V(kNoEmptyArraysHereInEmitFastAsciiArrayJoin, \
- "No empty arrays here in EmitFastAsciiArrayJoin") \
- V(kNonInitializerAssignmentToConst, \
- "non-initializer assignment to const") \
- V(kNonSmiIndex, "Non-smi index") \
- V(kNonSmiKeyInArrayLiteral, "Non-smi key in array literal") \
- V(kNonSmiValue, "Non-smi value") \
- V(kNotEnoughVirtualRegistersForValues, \
- "not enough virtual registers for values") \
- V(kNotEnoughVirtualRegistersRegalloc, \
- "not enough virtual registers (regalloc)") \
- V(kObjectFoundInSmiOnlyArray, "object found in smi-only array") \
- V(kObjectLiteralWithComplexProperty, \
- "Object literal with complex property") \
- V(kOddballInStringTableIsNotUndefinedOrTheHole, \
- "oddball in string table is not undefined or the hole") \
- V(kOperandIsASmiAndNotAName, "Operand is a smi and not a name") \
- V(kOperandIsASmiAndNotAString, "Operand is a smi and not a string") \
- V(kOperandIsASmi, "Operand is a smi") \
- V(kOperandIsNotAName, "Operand is not a name") \
- V(kOperandIsNotANumber, "Operand is not a number") \
- V(kOperandIsNotASmi, "Operand is not a smi") \
- V(kOperandIsNotAString, "Operand is not a string") \
- V(kOperandIsNotSmi, "Operand is not smi") \
- V(kOperandNotANumber, "Operand not a number") \
- V(kOptimizedTooManyTimes, "optimized too many times") \
- V(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister, \
- "Out of virtual registers while trying to allocate temp register") \
- V(kParseScopeError, "parse/scope error") \
- V(kPossibleDirectCallToEval, "possible direct call to eval") \
- V(kPropertyAllocationCountFailed, "Property allocation count failed") \
- V(kReceivedInvalidReturnAddress, "Received invalid return address") \
- V(kReferenceToAVariableWhichRequiresDynamicLookup, \
- "reference to a variable which requires dynamic lookup") \
- V(kReferenceToGlobalLexicalVariable, \
- "reference to global lexical variable") \
- V(kReferenceToUninitializedVariable, "reference to uninitialized variable") \
- V(kRegisterDidNotMatchExpectedRoot, "Register did not match expected root") \
- V(kRegisterWasClobbered, "register was clobbered") \
- V(kScopedBlock, "ScopedBlock") \
- V(kSharedFunctionInfoLiteral, "SharedFunctionInfoLiteral") \
- V(kSmiAdditionOverflow, "Smi addition overflow") \
- V(kSmiSubtractionOverflow, "Smi subtraction overflow") \
- V(kStackFrameTypesMustMatch, "stack frame types must match") \
- V(kSwitchStatementMixedOrNonLiteralSwitchLabels, \
- "SwitchStatement: mixed or non-literal switch labels") \
- V(kSwitchStatementTooManyClauses, "SwitchStatement: too many clauses") \
- V(kTheInstructionShouldBeALui, "The instruction should be a lui") \
- V(kTheInstructionShouldBeAnOri, "The instruction should be an ori") \
- V(kTheInstructionToPatchShouldBeALoadFromPc, \
- "The instruction to patch should be a load from pc") \
- V(kTheInstructionToPatchShouldBeALui, \
- "The instruction to patch should be a lui") \
- V(kTheInstructionToPatchShouldBeAnOri, \
- "The instruction to patch should be an ori") \
- V(kTooManyParametersLocals, "too many parameters/locals") \
- V(kTooManyParameters, "too many parameters") \
- V(kTooManySpillSlotsNeededForOSR, "Too many spill slots needed for OSR") \
- V(kToOperandIsDoubleRegisterUnimplemented, \
- "ToOperand IsDoubleRegister unimplemented") \
- V(kToOperandUnsupportedDoubleImmediate, \
- "ToOperand Unsupported double immediate") \
- V(kTryCatchStatement, "TryCatchStatement") \
- V(kTryFinallyStatement, "TryFinallyStatement") \
- V(kUnableToEncodeValueAsSmi, "Unable to encode value as smi") \
- V(kUnalignedAllocationInNewSpace, "Unaligned allocation in new space") \
- V(kUndefinedValueNotLoaded, "Undefined value not loaded") \
- V(kUndoAllocationOfNonAllocatedMemory, \
- "Undo allocation of non allocated memory") \
- V(kUnexpectedAllocationTop, "Unexpected allocation top") \
- V(kUnexpectedElementsKindInArrayConstructor, \
- "Unexpected ElementsKind in array constructor") \
- V(kUnexpectedFallthroughFromCharCodeAtSlowCase, \
- "Unexpected fallthrough from CharCodeAt slow case") \
- V(kUnexpectedFallthroughFromCharFromCodeSlowCase, \
- "Unexpected fallthrough from CharFromCode slow case") \
- V(kUnexpectedFallThroughFromStringComparison, \
- "Unexpected fall-through from string comparison") \
- V(kUnexpectedFallThroughInBinaryStubGenerateFloatingPointCode, \
- "Unexpected fall-through in BinaryStub_GenerateFloatingPointCode") \
- V(kUnexpectedFallthroughToCharCodeAtSlowCase, \
- "Unexpected fallthrough to CharCodeAt slow case") \
- V(kUnexpectedFallthroughToCharFromCodeSlowCase, \
- "Unexpected fallthrough to CharFromCode slow case") \
- V(kUnexpectedFPUStackDepthAfterInstruction, \
- "Unexpected FPU stack depth after instruction") \
- V(kUnexpectedInitialMapForArrayFunction1, \
- "Unexpected initial map for Array function (1)") \
- V(kUnexpectedInitialMapForArrayFunction2, \
- "Unexpected initial map for Array function (2)") \
- V(kUnexpectedInitialMapForArrayFunction, \
- "Unexpected initial map for Array function") \
- V(kUnexpectedInitialMapForInternalArrayFunction, \
- "Unexpected initial map for InternalArray function") \
- V(kUnexpectedLevelAfterReturnFromApiCall, \
- "Unexpected level after return from api call") \
- V(kUnexpectedNumberOfPreAllocatedPropertyFields, \
- "Unexpected number of pre-allocated property fields") \
- V(kUnexpectedStringFunction, "Unexpected String function") \
- V(kUnexpectedStringType, "Unexpected string type") \
- V(kUnexpectedStringWrapperInstanceSize, \
- "Unexpected string wrapper instance size") \
- V(kUnexpectedTypeForRegExpDataFixedArrayExpected, \
- "Unexpected type for RegExp data, FixedArray expected") \
- V(kUnexpectedUnusedPropertiesOfStringWrapper, \
- "Unexpected unused properties of string wrapper") \
- V(kUninitializedKSmiConstantRegister, "Uninitialized kSmiConstantRegister") \
- V(kUnknown, "unknown") \
- V(kUnsupportedConstCompoundAssignment, \
- "unsupported const compound assignment") \
- V(kUnsupportedCountOperationWithConst, \
- "unsupported count operation with const") \
- V(kUnsupportedDoubleImmediate, "unsupported double immediate") \
- V(kUnsupportedLetCompoundAssignment, "unsupported let compound assignment") \
- V(kUnsupportedLookupSlotInDeclaration, \
- "unsupported lookup slot in declaration") \
- V(kUnsupportedNonPrimitiveCompare, "Unsupported non-primitive compare") \
- V(kUnsupportedPhiUseOfArguments, "Unsupported phi use of arguments") \
- V(kUnsupportedPhiUseOfConstVariable, \
- "Unsupported phi use of const variable") \
- V(kUnsupportedTaggedImmediate, "unsupported tagged immediate") \
- V(kVariableResolvedToWithContext, "Variable resolved to with context") \
- V(kWeShouldNotHaveAnEmptyLexicalContext, \
- "we should not have an empty lexical context") \
- V(kWithStatement, "WithStatement") \
- V(kWrongAddressOrValuePassedToRecordWrite, \
- "Wrong address or value passed to RecordWrite")
-
-
-#define ERROR_MESSAGES_CONSTANTS(C, T) C,
-enum BailoutReason {
- ERROR_MESSAGES_LIST(ERROR_MESSAGES_CONSTANTS)
- kLastErrorMessage
-};
-#undef ERROR_MESSAGES_CONSTANTS
-
-
-const char* GetBailoutReason(BailoutReason reason);
+ V(ObjectHashTable) \
// Object is the abstract superclass for all classes in the
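Note on the large deletion just above: the BailoutReason machinery being removed is a classic X-macro, where one list macro expands into both an enum and a parallel table of strings. A self-contained toy version of the pattern, with made-up entries:

#define MESSAGES_LIST(V)       \
  V(kFirst, "first message")   \
  V(kSecond, "second message")

#define DECLARE_CONSTANT(C, T) C,
enum Reason {
  MESSAGES_LIST(DECLARE_CONSTANT)
  kLastReason
};
#undef DECLARE_CONSTANT

#define DECLARE_TEXT(C, T) T,
static const char* kReasonTexts[] = {
  MESSAGES_LIST(DECLARE_TEXT)
};
#undef DECLARE_TEXT

const char* GetReasonText(Reason reason) { return kReasonTexts[reason]; }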
@@ -4781,6 +4501,7 @@
V(KEYED_CALL_IC) \
V(STORE_IC) \
V(KEYED_STORE_IC) \
+ V(UNARY_OP_IC) \
V(BINARY_OP_IC) \
V(COMPARE_IC) \
V(COMPARE_NIL_IC) \
@@ -4899,7 +4620,8 @@
// TODO(danno): This is a bit of a hack right now since there are still
// clients of this API that pass "extra" values in for argc. These clients
// should be retrofitted to use ExtendedExtraICState.
- return kind == COMPARE_NIL_IC || kind == TO_BOOLEAN_IC;
+ return kind == COMPARE_NIL_IC || kind == TO_BOOLEAN_IC ||
+ kind == UNARY_OP_IC;
}
inline StubType type(); // Only valid for monomorphic IC stubs.
@@ -4914,6 +4636,7 @@
inline bool is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
inline bool is_call_stub() { return kind() == CALL_IC; }
inline bool is_keyed_call_stub() { return kind() == KEYED_CALL_IC; }
+ inline bool is_unary_op_stub() { return kind() == UNARY_OP_IC; }
inline bool is_binary_op_stub() { return kind() == BINARY_OP_IC; }
inline bool is_compare_ic_stub() { return kind() == COMPARE_IC; }
inline bool is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; }
@@ -4987,6 +4710,10 @@
inline CheckType check_type();
inline void set_check_type(CheckType value);
+ // [type-recording unary op type]: For kind UNARY_OP_IC.
+ inline byte unary_op_type();
+ inline void set_unary_op_type(byte value);
+
// [to_boolean_foo]: For kind TO_BOOLEAN_IC, tells what state the stub is in.
inline byte to_boolean_state();
@@ -5225,6 +4952,9 @@
// KindSpecificFlags1 layout (STUB and OPTIMIZED_FUNCTION)
static const int kStackSlotsFirstBit = 0;
static const int kStackSlotsBitCount = 24;
+ static const int kUnaryOpTypeFirstBit =
+ kStackSlotsFirstBit + kStackSlotsBitCount;
+ static const int kUnaryOpTypeBitCount = 3;
static const int kHasFunctionCacheFirstBit =
kStackSlotsFirstBit + kStackSlotsBitCount;
static const int kHasFunctionCacheBitCount = 1;
@@ -5233,12 +4963,15 @@
static const int kMarkedForDeoptimizationBitCount = 1;
STATIC_ASSERT(kStackSlotsFirstBit + kStackSlotsBitCount <= 32);
+ STATIC_ASSERT(kUnaryOpTypeFirstBit + kUnaryOpTypeBitCount <= 32);
STATIC_ASSERT(kHasFunctionCacheFirstBit + kHasFunctionCacheBitCount <= 32);
STATIC_ASSERT(kMarkedForDeoptimizationFirstBit +
kMarkedForDeoptimizationBitCount <= 32);
class StackSlotsField: public BitField<int,
kStackSlotsFirstBit, kStackSlotsBitCount> {}; // NOLINT
+ class UnaryOpTypeField: public BitField<int,
+ kUnaryOpTypeFirstBit, kUnaryOpTypeBitCount> {}; // NOLINT
class HasFunctionCacheField: public BitField<bool,
kHasFunctionCacheFirstBit, kHasFunctionCacheBitCount> {}; // NOLINT
class MarkedForDeoptimizationField: public BitField<bool,
@@ -5443,8 +5176,8 @@
inline void set_bit_field2(byte value);
// Bit field 3.
- inline uint32_t bit_field3();
- inline void set_bit_field3(uint32_t bits);
+ inline int bit_field3();
+ inline void set_bit_field3(int value);
class EnumLengthBits: public BitField<int, 0, 11> {};
class NumberOfOwnDescriptorsBits: public BitField<int, 11, 11> {};
@@ -5456,7 +5189,6 @@
class Deprecated: public BitField<bool, 27, 1> {};
class IsFrozen: public BitField<bool, 28, 1> {};
class IsUnstable: public BitField<bool, 29, 1> {};
- class IsMigrationTarget: public BitField<bool, 30, 1> {};
// Tells whether the object in the prototype property will be used
// for instances created from this function. If the prototype
@@ -5763,8 +5495,6 @@
inline bool is_frozen();
inline void mark_unstable();
inline bool is_stable();
- inline void set_migration_target(bool value);
- inline bool is_migration_target();
inline void deprecate();
inline bool is_deprecated();
inline bool CanBeDeprecated();
@@ -5911,6 +5641,7 @@
// the descriptor array.
inline void NotifyLeafMapLayoutChange();
+ inline bool CanOmitPrototypeChecks();
inline bool CanOmitMapChecks();
void AddDependentCompilationInfo(DependentCode::DependencyGroup group,
@@ -5927,7 +5658,7 @@
#ifdef VERIFY_HEAP
void SharedMapVerify();
- void VerifyOmittedMapChecks();
+ void VerifyOmittedPrototypeChecks();
#endif
inline int visitor_id();
@@ -6574,7 +6305,7 @@
// Disable (further) attempted optimization of all functions sharing this
// shared function info.
- void DisableOptimization(BailoutReason reason);
+ void DisableOptimization(const char* reason);
// Lookup the bailout ID and ASSERT that it exists in the non-optimized
// code, returns whether it asserted (i.e., always true if assertions are
@@ -10056,7 +9787,6 @@
V(kHandleScope, "handlescope", "(Handle scope)") \
V(kBuiltins, "builtins", "(Builtins)") \
V(kGlobalHandles, "globalhandles", "(Global handles)") \
- V(kEternalHandles, "eternalhandles", "(Eternal handles)") \
V(kThreadManager, "threadmanager", "(Thread manager)") \
V(kExtensions, "Extensions", "(Extensions)")
diff --git a/src/parser.cc b/src/parser.cc
index 4947790..df568ef 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -3197,20 +3197,6 @@
factory()->NewNumberLiteral(1),
position);
}
- // The same idea for '-foo' => 'foo*(-1)'.
- if (op == Token::SUB) {
- return factory()->NewBinaryOperation(Token::MUL,
- expression,
- factory()->NewNumberLiteral(-1),
- position);
- }
- // ...and one more time for '~foo' => 'foo^(~0)'.
- if (op == Token::BIT_NOT) {
- return factory()->NewBinaryOperation(Token::BIT_XOR,
- expression,
- factory()->NewNumberLiteral(~0),
- position);
- }
return factory()->NewUnaryOperation(op, expression, position);
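Note: the two removed rewrites were semantics-preserving for JS numbers, since ~x == x ^ ~0 holds for every int32 and multiplying by -1.0 negates any double exactly (signed zeros included; NaN stays NaN). A quick standalone check:

#include <cassert>
#include <cmath>
#include <cstdint>

int main() {
  // Bitwise complement equals xor with all-ones (sampled stride for speed).
  for (int64_t i = INT32_MIN; i <= INT32_MAX; i += 65537) {
    int32_t x = static_cast<int32_t>(i);
    assert(~x == (x ^ ~0));
  }
  // Multiplying by -1.0 flips the sign bit, even for zero.
  assert(std::signbit(0.0 * -1.0));    // 0 * -1 is -0
  assert(!std::signbit(-0.0 * -1.0));  // -0 * -1 is +0
  return 0;
}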
diff --git a/src/platform-linux.cc b/src/platform-linux.cc
index 8856833..5c252bb 100644
--- a/src/platform-linux.cc
+++ b/src/platform-linux.cc
@@ -239,8 +239,7 @@
#else
#if defined(__ARM_PCS_VFP)
return true;
-#elif defined(__ARM_PCS) || defined(__SOFTFP__) || defined(__SOFTFP) || \
- !defined(__VFP_FP__)
+#elif defined(__ARM_PCS) || defined(__SOFTFP) || !defined(__VFP_FP__)
return false;
#else
#error "Your version of GCC does not report the FP ABI compiled for." \
diff --git a/src/profile-generator.cc b/src/profile-generator.cc
index e772a54..4e2e389 100644
--- a/src/profile-generator.cc
+++ b/src/profile-generator.cc
@@ -376,8 +376,8 @@
: title_(title),
uid_(uid),
record_samples_(record_samples),
- start_time_us_(OS::Ticks()),
- end_time_us_(0) {
+ start_time_ms_(OS::TimeCurrentMillis()),
+ end_time_ms_(0) {
}
@@ -388,13 +388,13 @@
void CpuProfile::CalculateTotalTicksAndSamplingRate() {
- end_time_us_ = OS::Ticks();
+ end_time_ms_ = OS::TimeCurrentMillis();
top_down_.CalculateTotalTicks();
- double duration_ms = (end_time_us_ - start_time_us_) / 1000.;
- if (duration_ms < 1) duration_ms = 1;
+ double duration = end_time_ms_ - start_time_ms_;
+ if (duration < 1) duration = 1;
unsigned ticks = top_down_.root()->total_ticks();
- double rate = ticks / duration_ms;
+ double rate = ticks / duration;
top_down_.SetTickRatePerMs(rate);
}
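Note: the rate computation above is just ticks over elapsed wall-clock milliseconds, with the duration clamped to at least 1 ms so that very short profiles do not produce absurd rates. Distilled into a standalone helper:

// Illustrative distillation of CalculateTotalTicksAndSamplingRate().
double TickRatePerMs(unsigned ticks, double start_ms, double end_ms) {
  double duration = end_ms - start_ms;
  if (duration < 1) duration = 1;  // clamp sub-millisecond profiles
  return ticks / duration;
}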
diff --git a/src/profile-generator.h b/src/profile-generator.h
index 0cc397e..7861ccd 100644
--- a/src/profile-generator.h
+++ b/src/profile-generator.h
@@ -209,15 +209,12 @@
void AddPath(const Vector<CodeEntry*>& path);
void CalculateTotalTicksAndSamplingRate();
- const char* title() const { return title_; }
- unsigned uid() const { return uid_; }
- const ProfileTree* top_down() const { return &top_down_; }
+ INLINE(const char* title() const) { return title_; }
+ INLINE(unsigned uid() const) { return uid_; }
+ INLINE(const ProfileTree* top_down() const) { return &top_down_; }
- int samples_count() const { return samples_.length(); }
- ProfileNode* sample(int index) const { return samples_.at(index); }
-
- int64_t start_time_us() const { return start_time_us_; }
- int64_t end_time_us() const { return end_time_us_; }
+ INLINE(int samples_count() const) { return samples_.length(); }
+ INLINE(ProfileNode* sample(int index) const) { return samples_.at(index); }
void UpdateTicksScale();
@@ -228,8 +225,8 @@
const char* title_;
unsigned uid_;
bool record_samples_;
- int64_t start_time_us_;
- int64_t end_time_us_;
+ double start_time_ms_;
+ double end_time_ms_;
List<ProfileNode*> samples_;
ProfileTree top_down_;
diff --git a/src/runtime.cc b/src/runtime.cc
index 0916b93..ef6eeb3 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -66,23 +66,6 @@
#include "v8threads.h"
#include "vm-state-inl.h"
-#ifdef V8_I18N_SUPPORT
-#include "i18n.h"
-#include "unicode/brkiter.h"
-#include "unicode/calendar.h"
-#include "unicode/coll.h"
-#include "unicode/datefmt.h"
-#include "unicode/dtfmtsym.h"
-#include "unicode/dtptngen.h"
-#include "unicode/locid.h"
-#include "unicode/numfmt.h"
-#include "unicode/numsys.h"
-#include "unicode/smpdtfmt.h"
-#include "unicode/timezone.h"
-#include "unicode/uloc.h"
-#include "unicode/uversion.h"
-#endif
-
#ifndef _STLP_VENDOR_CSTD
// STLPort doesn't import fpclassify and isless into the std namespace.
using std::fpclassify;
@@ -7253,6 +7236,15 @@
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberNot) {
+ SealHandleScope shs(isolate);
+ ASSERT(args.length() == 1);
+
+ CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
+ return isolate->heap()->NumberFromInt32(~x);
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberShl) {
SealHandleScope shs(isolate);
ASSERT(args.length() == 2);
@@ -8555,21 +8547,23 @@
// Use linear search of the unoptimized code's back edge table to find
// the AST id matching the PC.
- uint32_t target_pc_offset =
- static_cast<uint32_t>(frame->pc() - unoptimized->instruction_start());
+ Address start = unoptimized->instruction_start();
+ unsigned target_pc_offset = static_cast<unsigned>(frame->pc() - start);
+ Address table_cursor = start + unoptimized->back_edge_table_offset();
+ uint32_t table_length = Memory::uint32_at(table_cursor);
+ table_cursor += kIntSize;
uint32_t loop_depth = 0;
-
- for (FullCodeGenerator::BackEdgeTableIterator back_edges(*unoptimized);
- !back_edges.Done();
- back_edges.Next()) {
- if (back_edges.pc_offset() == target_pc_offset) {
- ast_id = back_edges.ast_id();
- loop_depth = back_edges.loop_depth();
+ for (unsigned i = 0; i < table_length; ++i) {
+ // Table entries are (AST id, pc offset, loop depth) triples.
+ uint32_t pc_offset = Memory::uint32_at(table_cursor + kIntSize);
+ if (pc_offset == target_pc_offset) {
+ ast_id = BailoutId(static_cast<int>(Memory::uint32_at(table_cursor)));
+ loop_depth = Memory::uint32_at(table_cursor + 2 * kIntSize);
break;
}
+ table_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
ASSERT(!ast_id.IsNone());
-
if (FLAG_trace_osr) {
PrintF("[replacing on-stack at AST id %d, loop depth %d in ",
ast_id.ToInt(), loop_depth);
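Note: the raw offsets in this loop imply a simple table layout — a 32-bit length word followed by fixed-size records. Spelled out with an illustrative struct (the names are ours, not V8's):

#include <cstdint>

// One back edge record, matching the three uint32_t reads above.
struct BackEdgeEntry {
  uint32_t ast_id;      // read at offset 0
  uint32_t pc_offset;   // read at offset kIntSize
  uint32_t loop_depth;  // read at offset 2 * kIntSize
};
// Table layout: uint32_t length, then `length` BackEdgeEntry records, so
// under this reading FullCodeGenerator::kBackEdgeEntrySize is 3 * kIntSize.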
@@ -8686,8 +8680,8 @@
CONVERT_ARG_HANDLE_CHECKED(JSObject, arguments, 2);
CONVERT_SMI_ARG_CHECKED(offset, 3);
CONVERT_SMI_ARG_CHECKED(argc, 4);
- RUNTIME_ASSERT(offset >= 0);
- RUNTIME_ASSERT(argc >= 0);
+ ASSERT(offset >= 0);
+ ASSERT(argc >= 0);
// If there are too many arguments, allocate argv via malloc.
const int argv_small_size = 10;
@@ -9480,7 +9474,7 @@
ASSERT_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
- source = Handle<String>(FlattenGetString(source));
+ source = Handle<String>(source->TryFlattenGetString());
// Optimized fast case where we only have ASCII characters.
Handle<Object> result;
if (source->IsSeqOneByteString()) {
@@ -13370,304 +13364,6 @@
#endif // ENABLE_DEBUGGER_SUPPORT
-#ifdef V8_I18N_SUPPORT
-RUNTIME_FUNCTION(MaybeObject*, Runtime_CanonicalizeLanguageTag) {
- HandleScope scope(isolate);
-
- ASSERT(args.length() == 1);
- CONVERT_ARG_HANDLE_CHECKED(String, locale_id_str, 0);
-
- v8::String::Utf8Value locale_id(v8::Utils::ToLocal(locale_id_str));
-
- // Return value which denotes invalid language tag.
- const char* const kInvalidTag = "invalid-tag";
-
- UErrorCode error = U_ZERO_ERROR;
- char icu_result[ULOC_FULLNAME_CAPACITY];
- int icu_length = 0;
-
- uloc_forLanguageTag(*locale_id, icu_result, ULOC_FULLNAME_CAPACITY,
- &icu_length, &error);
- if (U_FAILURE(error) || icu_length == 0) {
- return isolate->heap()->AllocateStringFromOneByte(CStrVector(kInvalidTag));
- }
-
- char result[ULOC_FULLNAME_CAPACITY];
-
- // Force strict BCP47 rules.
- uloc_toLanguageTag(icu_result, result, ULOC_FULLNAME_CAPACITY, TRUE, &error);
-
- if (U_FAILURE(error)) {
- return isolate->heap()->AllocateStringFromOneByte(CStrVector(kInvalidTag));
- }
-
- return isolate->heap()->AllocateStringFromOneByte(CStrVector(result));
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_AvailableLocalesOf) {
- HandleScope scope(isolate);
-
- ASSERT(args.length() == 1);
- CONVERT_ARG_HANDLE_CHECKED(String, service, 0);
-
- const icu::Locale* available_locales = NULL;
- int32_t count = 0;
-
- if (service->IsUtf8EqualTo(CStrVector("collator"))) {
- available_locales = icu::Collator::getAvailableLocales(count);
- } else if (service->IsUtf8EqualTo(CStrVector("numberformat"))) {
- available_locales = icu::NumberFormat::getAvailableLocales(count);
- } else if (service->IsUtf8EqualTo(CStrVector("dateformat"))) {
- available_locales = icu::DateFormat::getAvailableLocales(count);
- } else if (service->IsUtf8EqualTo(CStrVector("breakiterator"))) {
- available_locales = icu::BreakIterator::getAvailableLocales(count);
- }
-
- UErrorCode error = U_ZERO_ERROR;
- char result[ULOC_FULLNAME_CAPACITY];
- Handle<JSObject> locales =
- isolate->factory()->NewJSObject(isolate->object_function());
-
- for (int32_t i = 0; i < count; ++i) {
- const char* icu_name = available_locales[i].getName();
-
- error = U_ZERO_ERROR;
- // No need to force strict BCP47 rules.
- uloc_toLanguageTag(icu_name, result, ULOC_FULLNAME_CAPACITY, FALSE, &error);
- if (U_FAILURE(error)) {
- // This shouldn't happen, but let's not break the user.
- continue;
- }
-
- RETURN_IF_EMPTY_HANDLE(isolate,
- JSObject::SetLocalPropertyIgnoreAttributes(
- locales,
- isolate->factory()->NewStringFromAscii(CStrVector(result)),
- isolate->factory()->NewNumber(i),
- NONE));
- }
-
- return *locales;
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDefaultICULocale) {
- SealHandleScope shs(isolate);
-
- ASSERT(args.length() == 0);
-
- icu::Locale default_locale;
-
- // Set the locale
- char result[ULOC_FULLNAME_CAPACITY];
- UErrorCode status = U_ZERO_ERROR;
- uloc_toLanguageTag(
- default_locale.getName(), result, ULOC_FULLNAME_CAPACITY, FALSE, &status);
- if (U_SUCCESS(status)) {
- return isolate->heap()->AllocateStringFromOneByte(CStrVector(result));
- }
-
- return isolate->heap()->AllocateStringFromOneByte(CStrVector("und"));
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLanguageTagVariants) {
- HandleScope scope(isolate);
-
- ASSERT(args.length() == 1);
-
- CONVERT_ARG_HANDLE_CHECKED(JSArray, input, 0);
-
- uint32_t length = static_cast<uint32_t>(input->length()->Number());
- Handle<FixedArray> output = isolate->factory()->NewFixedArray(length);
- Handle<Name> maximized =
- isolate->factory()->NewStringFromAscii(CStrVector("maximized"));
- Handle<Name> base =
- isolate->factory()->NewStringFromAscii(CStrVector("base"));
- for (unsigned int i = 0; i < length; ++i) {
- MaybeObject* maybe_string = input->GetElement(i);
- Object* locale_id;
- if (!maybe_string->ToObject(&locale_id) || !locale_id->IsString()) {
- return isolate->Throw(isolate->heap()->illegal_argument_string());
- }
-
- v8::String::Utf8Value utf8_locale_id(
- v8::Utils::ToLocal(Handle<String>(String::cast(locale_id))));
-
- UErrorCode error = U_ZERO_ERROR;
-
- // Convert from BCP47 to ICU format.
- // de-DE-u-co-phonebk -> de_DE@collation=phonebook
- char icu_locale[ULOC_FULLNAME_CAPACITY];
- int icu_locale_length = 0;
- uloc_forLanguageTag(*utf8_locale_id, icu_locale, ULOC_FULLNAME_CAPACITY,
- &icu_locale_length, &error);
- if (U_FAILURE(error) || icu_locale_length == 0) {
- return isolate->Throw(isolate->heap()->illegal_argument_string());
- }
-
- // Maximize the locale.
- // de_DE@collation=phonebook -> de_Latn_DE@collation=phonebook
- char icu_max_locale[ULOC_FULLNAME_CAPACITY];
- uloc_addLikelySubtags(
- icu_locale, icu_max_locale, ULOC_FULLNAME_CAPACITY, &error);
-
- // Remove extensions from maximized locale.
- // de_Latn_DE@collation=phonebook -> de_Latn_DE
- char icu_base_max_locale[ULOC_FULLNAME_CAPACITY];
- uloc_getBaseName(
- icu_max_locale, icu_base_max_locale, ULOC_FULLNAME_CAPACITY, &error);
-
- // Get original name without extensions.
- // de_DE@collation=phonebook -> de_DE
- char icu_base_locale[ULOC_FULLNAME_CAPACITY];
- uloc_getBaseName(
- icu_locale, icu_base_locale, ULOC_FULLNAME_CAPACITY, &error);
-
- // Convert from ICU locale format to BCP47 format.
- // de_Latn_DE -> de-Latn-DE
- char base_max_locale[ULOC_FULLNAME_CAPACITY];
- uloc_toLanguageTag(icu_base_max_locale, base_max_locale,
- ULOC_FULLNAME_CAPACITY, FALSE, &error);
-
- // de_DE -> de-DE
- char base_locale[ULOC_FULLNAME_CAPACITY];
- uloc_toLanguageTag(
- icu_base_locale, base_locale, ULOC_FULLNAME_CAPACITY, FALSE, &error);
-
- if (U_FAILURE(error)) {
- return isolate->Throw(isolate->heap()->illegal_argument_string());
- }
-
- Handle<JSObject> result =
- isolate->factory()->NewJSObject(isolate->object_function());
- RETURN_IF_EMPTY_HANDLE(isolate,
- JSObject::SetLocalPropertyIgnoreAttributes(
- result,
- maximized,
- isolate->factory()->NewStringFromAscii(CStrVector(base_max_locale)),
- NONE));
- RETURN_IF_EMPTY_HANDLE(isolate,
- JSObject::SetLocalPropertyIgnoreAttributes(
- result,
- base,
- isolate->factory()->NewStringFromAscii(CStrVector(base_locale)),
- NONE));
- output->set(i, *result);
- }
-
- Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(output);
- result->set_length(Smi::FromInt(length));
- return *result;
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateDateTimeFormat) {
- HandleScope scope(isolate);
-
- ASSERT(args.length() == 3);
-
- CONVERT_ARG_HANDLE_CHECKED(String, locale, 0);
- CONVERT_ARG_HANDLE_CHECKED(JSObject, options, 1);
- CONVERT_ARG_HANDLE_CHECKED(JSObject, resolved, 2);
-
- Handle<ObjectTemplateInfo> date_format_template =
- I18N::GetTemplate(isolate);
-
- // Create an empty object wrapper.
- bool has_pending_exception = false;
- Handle<JSObject> local_object = Execution::InstantiateObject(
- date_format_template, &has_pending_exception);
- if (has_pending_exception) {
- ASSERT(isolate->has_pending_exception());
- return Failure::Exception();
- }
-
- // Set date time formatter as internal field of the resulting JS object.
- icu::SimpleDateFormat* date_format = DateFormat::InitializeDateTimeFormat(
- isolate, locale, options, resolved);
-
- if (!date_format) return isolate->ThrowIllegalOperation();
-
- local_object->SetInternalField(0, reinterpret_cast<Smi*>(date_format));
-
- RETURN_IF_EMPTY_HANDLE(isolate,
- JSObject::SetLocalPropertyIgnoreAttributes(
- local_object,
- isolate->factory()->NewStringFromAscii(CStrVector("dateFormat")),
- isolate->factory()->NewStringFromAscii(CStrVector("valid")),
- NONE));
-
- Persistent<v8::Object> wrapper(reinterpret_cast<v8::Isolate*>(isolate),
- v8::Utils::ToLocal(local_object));
- // Make object handle weak so we can delete the date format once GC kicks in.
- wrapper.MakeWeak<void>(NULL, &DateFormat::DeleteDateFormat);
- Handle<Object> result = Utils::OpenPersistent(wrapper);
- wrapper.ClearAndLeak();
- return *result;
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateFormat) {
- HandleScope scope(isolate);
-
- ASSERT(args.length() == 2);
-
- CONVERT_ARG_HANDLE_CHECKED(JSObject, date_format_holder, 0);
- CONVERT_ARG_HANDLE_CHECKED(JSDate, date, 1);
-
- bool has_pending_exception = false;
- double millis = Execution::ToNumber(date, &has_pending_exception)->Number();
- if (has_pending_exception) {
- ASSERT(isolate->has_pending_exception());
- return Failure::Exception();
- }
-
- icu::SimpleDateFormat* date_format =
- DateFormat::UnpackDateFormat(isolate, date_format_holder);
- if (!date_format) return isolate->ThrowIllegalOperation();
-
- icu::UnicodeString result;
- date_format->format(millis, result);
-
- return *isolate->factory()->NewStringFromTwoByte(
- Vector<const uint16_t>(
- reinterpret_cast<const uint16_t*>(result.getBuffer()),
- result.length()));
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateParse) {
- HandleScope scope(isolate);
-
- ASSERT(args.length() == 2);
-
- CONVERT_ARG_HANDLE_CHECKED(JSObject, date_format_holder, 0);
- CONVERT_ARG_HANDLE_CHECKED(String, date_string, 1);
-
- v8::String::Utf8Value utf8_date(v8::Utils::ToLocal(date_string));
- icu::UnicodeString u_date(icu::UnicodeString::fromUTF8(*utf8_date));
- icu::SimpleDateFormat* date_format =
- DateFormat::UnpackDateFormat(isolate, date_format_holder);
- if (!date_format) return isolate->ThrowIllegalOperation();
-
- UErrorCode status = U_ZERO_ERROR;
- UDate date = date_format->parse(u_date, status);
- if (U_FAILURE(status)) return isolate->heap()->undefined_value();
-
- bool has_pending_exception = false;
- Handle<JSDate> result = Handle<JSDate>::cast(
- Execution::NewDate(static_cast<double>(date), &has_pending_exception));
- if (has_pending_exception) {
- ASSERT(isolate->has_pending_exception());
- return Failure::Exception();
- }
- return *result;
-}
-#endif // V8_I18N_SUPPORT
-
-
// Finds the script object from the script data. NOTE: This operation uses
// heap traversal to find the function generated for the source position
// for the requested break point. For lazily compiled functions several heap
@@ -13786,18 +13482,6 @@
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_MigrateInstance) {
- HandleScope scope(isolate);
- ASSERT(args.length() == 1);
- CONVERT_ARG_HANDLE_CHECKED(Object, object, 0);
- if (!object->IsJSObject()) return Smi::FromInt(0);
- Handle<JSObject> js_object = Handle<JSObject>::cast(object);
- if (!js_object->map()->is_deprecated()) return Smi::FromInt(0);
- JSObject::MigrateInstance(js_object);
- return *object;
-}
-
-
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
SealHandleScope shs(isolate);
// This is only called from codegen, so checks might be more lax.
@@ -14041,9 +13725,6 @@
ASSERT(proto->IsJSGlobalObject());
obj = JSReceiver::cast(proto);
}
- if (obj->IsJSProxy())
- return isolate->heap()->undefined_value();
-
ASSERT(!(obj->map()->is_observed() && obj->IsJSObject() &&
JSObject::cast(obj)->HasFastElements()));
ASSERT(obj->IsJSObject());
diff --git a/src/runtime.h b/src/runtime.h
index ade7e73..398cb3b 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -109,7 +109,6 @@
F(DebugCallbackSupportsStepping, 1, 1) \
F(DebugPrepareStepInIfStepping, 1, 1) \
F(FlattenString, 1, 1) \
- F(MigrateInstance, 1, 1) \
\
/* Array join support */ \
F(PushIfAbsent, 2, 1) \
@@ -158,6 +157,7 @@
F(NumberOr, 2, 1) \
F(NumberAnd, 2, 1) \
F(NumberXor, 2, 1) \
+ F(NumberNot, 1, 1) \
\
F(NumberShl, 2, 1) \
F(NumberShr, 2, 1) \
@@ -534,26 +534,6 @@
#define RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F)
#endif
-
-#ifdef V8_I18N_SUPPORT
-#define RUNTIME_FUNCTION_LIST_I18N_SUPPORT(F) \
- /* i18n support */ \
- /* Standalone, helper methods. */ \
- F(CanonicalizeLanguageTag, 1, 1) \
- F(AvailableLocalesOf, 1, 1) \
- F(GetDefaultICULocale, 0, 1) \
- F(GetLanguageTagVariants, 1, 1) \
- \
- /* Date format and parse. */ \
- F(CreateDateTimeFormat, 3, 1) \
- F(InternalDateFormat, 2, 1) \
- F(InternalDateParse, 2, 1) \
-
-#else
-#define RUNTIME_FUNCTION_LIST_I18N_SUPPORT(F)
-#endif
-
-
#ifdef DEBUG
#define RUNTIME_FUNCTION_LIST_DEBUG(F) \
/* Testing */ \
@@ -571,8 +551,7 @@
RUNTIME_FUNCTION_LIST_ALWAYS_1(F) \
RUNTIME_FUNCTION_LIST_ALWAYS_2(F) \
RUNTIME_FUNCTION_LIST_DEBUG(F) \
- RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F) \
- RUNTIME_FUNCTION_LIST_I18N_SUPPORT(F)
+ RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F)
// ----------------------------------------------------------------------------
// INLINE_FUNCTION_LIST defines all inlined functions accessed
diff --git a/src/runtime.js b/src/runtime.js
index 5339570..90fb36b 100644
--- a/src/runtime.js
+++ b/src/runtime.js
@@ -294,6 +294,20 @@
}
+// ECMA-262, section 11.4.7, page 47.
+function UNARY_MINUS() {
+ var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
+ return %NumberUnaryMinus(x);
+}
+
+
+// ECMA-262, section 11.4.8, page 48.
+function BIT_NOT() {
+ var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
+ return %NumberNot(x);
+}
+
+
// ECMA-262, section 11.7.1, page 51.
function SHL(y) {
var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
diff --git a/src/serialize.cc b/src/serialize.cc
index 746c926..6c5a620 100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -1304,7 +1304,6 @@
// No active or weak handles.
CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
- CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles());
// We don't support serializing installed extensions.
CHECK(!isolate->has_installed_extensions());
diff --git a/src/spaces.h b/src/spaces.h
index aa864b6..b47452e 100644
--- a/src/spaces.h
+++ b/src/spaces.h
@@ -784,9 +784,8 @@
// Maximum object size that fits in a page. Objects larger than that size
// are allocated in large object space and are never moved in memory. This
// also applies to new space allocation, since objects are never migrated
- // from new space to large object space. Takes double alignment into account.
- static const int kMaxNonCodeHeapObjectSize =
- kNonCodeObjectAreaSize - kPointerSize;
+ // from new space to large object space.
+ static const int kMaxNonCodeHeapObjectSize = kNonCodeObjectAreaSize;
// Page size mask.
static const intptr_t kPageAlignmentMask = (1 << kPageSizeBits) - 1;
diff --git a/src/type-info.cc b/src/type-info.cc
index 336b459..769df07 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -384,6 +384,17 @@
}
+Handle<Type> TypeFeedbackOracle::UnaryType(TypeFeedbackId id) {
+ Handle<Object> object = GetInfo(id);
+ if (!object->IsCode()) {
+ return handle(Type::None(), isolate());
+ }
+ Handle<Code> code = Handle<Code>::cast(object);
+ ASSERT(code->is_unary_op_stub());
+ return UnaryOpStub(code->extended_extra_ic_state()).GetType(isolate());
+}
+
+
void TypeFeedbackOracle::BinaryType(TypeFeedbackId id,
Handle<Type>* left,
Handle<Type>* right,
@@ -647,6 +658,7 @@
}
break;
+ case Code::UNARY_OP_IC:
case Code::BINARY_OP_IC:
case Code::COMPARE_IC:
case Code::TO_BOOLEAN_IC:
diff --git a/src/type-info.h b/src/type-info.h
index 4b376c8..1a7c67d 100644
--- a/src/type-info.h
+++ b/src/type-info.h
@@ -297,6 +297,7 @@
byte ToBooleanTypes(TypeFeedbackId id);
// Get type information for arithmetic operations and compares.
+ Handle<Type> UnaryType(TypeFeedbackId id);
void BinaryType(TypeFeedbackId id,
Handle<Type>* left,
Handle<Type>* right,
diff --git a/src/typing.cc b/src/typing.cc
index 727c104..4220d21 100644
--- a/src/typing.cc
+++ b/src/typing.cc
@@ -404,6 +404,8 @@
RECURSE(Visit(expr->expression()));
// Collect type feedback.
+ Handle<Type> op_type = oracle()->UnaryType(expr->UnaryOperationFeedbackId());
+ NarrowLowerType(expr->expression(), op_type);
if (expr->op() == Token::NOT) {
// TODO(rossberg): only do in test or value context.
expr->expression()->RecordToBooleanTypeFeedback(oracle());
@@ -417,6 +419,16 @@
case Token::VOID:
NarrowType(expr, Bounds(Type::Undefined(), isolate_));
break;
+ case Token::ADD:
+ case Token::SUB: {
+ Type* upper = *expr->expression()->bounds().upper;
+ if (!upper->Is(Type::Number())) upper = Type::Number();
+ NarrowType(expr, Bounds(Type::Smi(), upper, isolate_));
+ break;
+ }
+ case Token::BIT_NOT:
+ NarrowType(expr, Bounds(Type::Smi(), Type::Signed32(), isolate_));
+ break;
case Token::TYPEOF:
NarrowType(expr, Bounds(Type::InternalizedString(), isolate_));
break;
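Note: the added cases assign each unary expression a [lower, upper] type interval: unary +/- yields a number no wider than its operand's numeric bound, and ~ always lands in signed 32-bit range. A toy version of that logic over a simplified lattice (the ordering and names are ours):

// Simplified type lattice, ordered from narrowest to widest.
enum ToyType { kSmi, kSigned32, kNumber, kAny };
struct ToyBounds { ToyType lower; ToyType upper; };

ToyBounds UnaryOpBounds(char op, ToyType operand_upper) {
  switch (op) {
    case '+':
    case '-': {
      // Numeric result; keep the operand's bound if it is already numeric.
      ToyBounds numeric = { kSmi,
          operand_upper <= kNumber ? operand_upper : kNumber };
      return numeric;
    }
    case '~': {
      ToyBounds int32 = { kSmi, kSigned32 };  // bitwise ops produce int32
      return int32;
    }
    default: {
      ToyBounds any = { kSmi, kAny };
      return any;
    }
  }
}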
diff --git a/src/v8.cc b/src/v8.cc
index 93f3efb..cfec0c0 100644
--- a/src/v8.cc
+++ b/src/v8.cc
@@ -271,12 +271,7 @@
FLAG_gc_global = true;
FLAG_max_new_space_size = (1 << (kPageSizeBits - 10)) * 2;
}
-
- if (FLAG_parallel_recompilation &&
- (FLAG_trace_hydrogen || FLAG_trace_hydrogen_stubs)) {
- FLAG_parallel_recompilation = false;
- PrintF("Parallel recompilation has been disabled for tracing.\n");
- }
+ if (FLAG_trace_hydrogen) FLAG_parallel_recompilation = false;
if (FLAG_sweeper_threads <= 0) {
if (FLAG_concurrent_sweeping) {
diff --git a/src/version.cc b/src/version.cc
index 6460efd..2acc01c 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,8 +34,8 @@
// system so their names cannot be changed without changing the scripts.
#define MAJOR_VERSION 3
#define MINOR_VERSION 20
-#define BUILD_NUMBER 14
-#define PATCH_LEVEL 2
+#define BUILD_NUMBER 15
+#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h
index 826c06e..ae9aeee 100644
--- a/src/x64/assembler-x64-inl.h
+++ b/src/x64/assembler-x64-inl.h
@@ -373,14 +373,13 @@
bool RelocInfo::IsPatchedReturnSequence() {
// The recognized call sequence is:
- // movq(kScratchRegister, address); call(kScratchRegister);
+ // movq(kScratchRegister, immediate64); call(kScratchRegister);
// It only needs to be distinguished from a return sequence
// movq(rsp, rbp); pop(rbp); ret(n); int3 *6
// The 11th byte is int3 (0xCC) in the return sequence and
// REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
- return pc_[Assembler::kMoveAddressIntoScratchRegisterInstructionLength] !=
- 0xCC;
+ return pc_[2 + kPointerSize] != 0xCC;
#else
return false;
#endif
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index 8969d89..f5939c3 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -164,7 +164,10 @@
// Patch the code at the current PC with a call to the target address.
// Additional guard int3 instructions can be added if required.
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
- int code_size = Assembler::kCallSequenceLength + guard_bytes;
+ // Loading a register with a 64-bit immediate and calling through that
+ // register takes up 13 bytes; an int3 takes up one byte.
+ static const int kCallCodeSize = 13;
+ int code_size = kCallCodeSize + guard_bytes;
// Create a code patcher.
CodePatcher patcher(pc_, code_size);
@@ -180,7 +183,7 @@
patcher.masm()->call(r10);
// Check that the size of the code generated is as expected.
- ASSERT_EQ(Assembler::kCallSequenceLength,
+ ASSERT_EQ(kCallCodeSize,
patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));
// Add the requested number of int3 instructions after the call.
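Note: the 13-byte figure behind kCallCodeSize falls straight out of the x86-64 encoding. A hedged breakdown, with illustrative constant names of our own:

// movq(r10, imm64): REX.W+B prefix (1) + opcode 0xB8+reg (1) + imm64 (8).
static const int kMovRegImm64Length = 1 + 1 + 8;  // = 10 bytes
// call(r10): REX.B prefix (1) + opcode 0xFF (1) + ModRM byte (1).
static const int kCallRegLength = 1 + 1 + 1;      // = 3 bytes
// Together they give the 13-byte sequence; each int3 guard adds one 0xCC.
static const int kCallSequenceBytes = kMovRegImm64Length + kCallRegLength;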
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 4e36b6e..07afc12 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -579,36 +579,29 @@
// Distance between the address of the code target in the call instruction
// and the return address pushed on the stack.
static const int kCallTargetAddressOffset = 4; // Use 32-bit displacement.
- // The length of call(kScratchRegister).
- static const int kCallScratchRegisterInstructionLength = 3;
- // The length of call(Immediate32).
- static const int kShortCallInstructionLength = 5;
- // The length of movq(kScratchRegister, address).
- static const int kMoveAddressIntoScratchRegisterInstructionLength =
- 2 + kPointerSize;
- // The length of movq(kScratchRegister, address) and call(kScratchRegister).
- static const int kCallSequenceLength =
- kMoveAddressIntoScratchRegisterInstructionLength +
- kCallScratchRegisterInstructionLength;
+ // Distance between the start of the JS return sequence and where the
+ // 32-bit displacement of a near call would be, relative to the pushed
+ // return address. TODO: Use return sequence length instead.
+ // Should equal Debug::kX64JSReturnSequenceLength - kCallTargetAddressOffset;
+ static const int kPatchReturnSequenceAddressOffset = 13 - 4;
+ // Distance between start of patched debug break slot and where the
+ // 32-bit displacement of a near call would be, relative to the pushed
+ // return address. TODO: Use return sequence length instead.
+ // Should equal Debug::kX64JSReturnSequenceLength - kCallTargetAddressOffset;
+ static const int kPatchDebugBreakSlotAddressOffset = 13 - 4;
+ // TODO(X64): Rename this, removing the "Real", after changing the above.
+ static const int kRealPatchReturnSequenceAddressOffset = 2;
- // The js return and debug break slot must be able to contain an indirect
- // call sequence, some x64 JS code is padded with int3 to make it large
+ // Some x64 JS code is padded with int3 to make it large
// enough to hold an instruction when the debugger patches it.
- static const int kJSReturnSequenceLength = kCallSequenceLength;
- static const int kDebugBreakSlotLength = kCallSequenceLength;
- static const int kPatchDebugBreakSlotReturnOffset = kCallTargetAddressOffset;
- // Distance between the start of the JS return sequence and where the
- // 32-bit displacement of a short call would be. The short call is from
- // SetDebugBreakAtIC from debug-x64.cc.
- static const int kPatchReturnSequenceAddressOffset =
- kJSReturnSequenceLength - kPatchDebugBreakSlotReturnOffset;
- // Distance between the start of the JS return sequence and where the
- // 32-bit displacement of a short call would be. The short call is from
- // SetDebugBreakAtIC from debug-x64.cc.
- static const int kPatchDebugBreakSlotAddressOffset =
- kDebugBreakSlotLength - kPatchDebugBreakSlotReturnOffset;
- static const int kRealPatchReturnSequenceAddressOffset =
- kMoveAddressIntoScratchRegisterInstructionLength - kPointerSize;
+ static const int kJumpInstructionLength = 13;
+ static const int kCallInstructionLength = 13;
+ static const int kJSReturnSequenceLength = 13;
+ static const int kShortCallInstructionLength = 5;
+ static const int kPatchDebugBreakSlotReturnOffset = 4;
+
+ // The debug break slot must be able to contain a call instruction.
+ static const int kDebugBreakSlotLength = kCallInstructionLength;
// One byte opcode for test eax,0xXXXXXXXX.
static const byte kTestEaxByte = 0xA9;
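All of the 13s above come from the same movq-plus-call encoding, so the
restored offsets can be cross-checked mechanically. A standalone sketch
mirroring the constants (not part of the patch):

    constexpr int kPointerSize = 8;
    constexpr int kCallTargetAddressOffset = 4;      // 32-bit displacement
    constexpr int kCallInstructionLength = 13;       // movq(10) + call(3)
    constexpr int kJSReturnSequenceLength = 13;
    constexpr int kPatchReturnSequenceAddressOffset = 13 - 4;
    constexpr int kRealPatchReturnSequenceAddressOffset = 2;

    // The imm64 target starts 2 bytes into the movq, after REX.W and B8+r.
    static_assert(kRealPatchReturnSequenceAddressOffset + kPointerSize ==
                  kCallInstructionLength - 3, "movq is 10 bytes");
    static_assert(kPatchReturnSequenceAddressOffset ==
                  kJSReturnSequenceLength - kCallTargetAddressOffset,
                  "matches the 'Should equal' comments above");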
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 18a6e56..d34e4f7 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -59,9 +59,9 @@
int num_extra_args = 0;
if (extra_args == NEEDS_CALLED_FUNCTION) {
num_extra_args = 1;
- __ PopReturnAddressTo(kScratchRegister);
+ __ pop(kScratchRegister); // Save return address.
__ push(rdi);
- __ PushReturnAddressFrom(kScratchRegister);
+ __ push(kScratchRegister); // Restore return address.
} else {
ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
}
@@ -249,7 +249,7 @@
if (FLAG_debug_code) {
__ cmpq(rsi, rdi);
__ Assert(less_equal,
- kUnexpectedNumberOfPreAllocatedPropertyFields);
+ "Unexpected number of pre-allocated property fields.");
}
__ InitializeFieldsWithFiller(rcx, rsi, rdx);
__ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
@@ -280,7 +280,7 @@
__ subq(rdx, rcx);
// Done if no extra properties are to be allocated.
__ j(zero, &allocated);
- __ Assert(positive, kPropertyAllocationCountFailed);
+ __ Assert(positive, "Property allocation count failed.");
// Scale the number of elements by pointer size and add the header for
// FixedArrays to the start of the next object calculation from above.
@@ -429,10 +429,10 @@
}
// Remove caller arguments from the stack and return.
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx);
SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
__ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
- __ PushReturnAddressFrom(rcx);
+ __ push(rcx);
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->constructed_objects(), 1);
__ ret(0);
@@ -723,7 +723,7 @@
__ ret(2 * kPointerSize); // Remove state, rax.
__ bind(&not_tos_rax);
- __ Abort(kNoCasesLeft);
+ __ Abort("no cases left");
}
@@ -772,9 +772,9 @@
{ Label done;
__ testq(rax, rax);
__ j(not_zero, &done);
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx);
__ Push(masm->isolate()->factory()->undefined_value());
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx);
__ incq(rax);
__ bind(&done);
}
@@ -895,9 +895,9 @@
__ cmpq(rdx, Immediate(1));
__ j(not_equal, &non_proxy);
- __ PopReturnAddressTo(rdx);
+ __ pop(rdx); // return address
__ push(rdi); // re-add proxy object as additional argument
- __ PushReturnAddressFrom(rdx);
+ __ push(rdx);
__ incq(rax);
__ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
__ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
@@ -1113,9 +1113,9 @@
// Will both indicate a NULL and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
- __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
+ __ Check(not_smi, "Unexpected initial map for InternalArray function");
__ CmpObjectType(rbx, MAP_TYPE, rcx);
- __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
+ __ Check(equal, "Unexpected initial map for InternalArray function");
}
// Run the native code for the InternalArray function called as a normal
@@ -1143,9 +1143,9 @@
// Will both indicate a NULL and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
- __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
+ __ Check(not_smi, "Unexpected initial map for Array function");
__ CmpObjectType(rbx, MAP_TYPE, rcx);
- __ Check(equal, kUnexpectedInitialMapForArrayFunction);
+ __ Check(equal, "Unexpected initial map for Array function");
}
// Run the native code for the Array function called as a normal function.
@@ -1173,7 +1173,7 @@
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
__ cmpq(rdi, rcx);
- __ Assert(equal, kUnexpectedStringFunction);
+ __ Assert(equal, "Unexpected String function");
}
// Load the first argument into rax and get rid of the rest
@@ -1182,9 +1182,9 @@
__ testq(rax, rax);
__ j(zero, &no_arguments);
__ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx);
__ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
- __ PushReturnAddressFrom(rcx);
+ __ push(rcx);
__ movq(rax, rbx);
// Lookup the argument in the number to string cache.
@@ -1219,9 +1219,9 @@
if (FLAG_debug_code) {
__ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
Immediate(JSValue::kSize >> kPointerSizeLog2));
- __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
+ __ Assert(equal, "Unexpected string wrapper instance size");
__ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
- __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
+ __ Assert(equal, "Unexpected unused properties of string wrapper");
}
__ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
@@ -1268,9 +1268,9 @@
// stack, and jump back to the case where the argument is a string.
__ bind(&no_arguments);
__ LoadRoot(rbx, Heap::kempty_stringRootIndex);
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx);
__ lea(rsp, Operand(rsp, kPointerSize));
- __ PushReturnAddressFrom(rcx);
+ __ push(rcx);
__ jmp(&argument_is_string);
// At this point the argument is already a string. Call runtime to
@@ -1313,10 +1313,10 @@
__ pop(rbp);
// Remove caller arguments from the stack.
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx);
SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
__ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
- __ PushReturnAddressFrom(rcx);
+ __ push(rcx);
}
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index ad33a8c..551a716 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -246,6 +246,17 @@
}
+void UnaryOpStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { rax };
+ descriptor->register_param_count_ = 1;
+ descriptor->register_params_ = registers;
+ descriptor->deoptimization_handler_ =
+ FUNCTION_ADDR(UnaryOpIC_Miss);
+}
+
+
void StoreGlobalStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -419,12 +430,12 @@
// Create a new closure through the slower runtime call.
__ bind(&gc);
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx); // Temporarily remove return address.
__ pop(rdx);
__ push(rsi);
__ push(rdx);
__ PushRoot(Heap::kFalseValueRootIndex);
- __ PushReturnAddressFrom(rcx);
+ __ push(rcx); // Restore return address.
__ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}
@@ -500,8 +511,9 @@
Label after_sentinel;
__ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
if (FLAG_debug_code) {
+ const char* message = "Expected 0 as a Smi sentinel";
__ cmpq(rcx, Immediate(0));
- __ Assert(equal, kExpected0AsASmiSentinel);
+ __ Assert(equal, message);
}
__ movq(rcx, GlobalObjectOperand());
__ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
@@ -683,13 +695,13 @@
void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx); // Save return address.
__ push(rdx);
__ push(rax);
// Left and right arguments are now on top.
__ Push(Smi::FromInt(MinorKey()));
- __ PushReturnAddressFrom(rcx);
+ __ push(rcx); // Push return address.
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
@@ -942,7 +954,7 @@
// Set the map.
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- kHeapNumberMapRegisterClobbered);
+ "HeapNumberMap register clobbered.");
__ movq(FieldOperand(rax, HeapObject::kMapOffset),
heap_number_map);
__ cvtqsi2sd(xmm0, rbx);
@@ -962,7 +974,8 @@
}
// No fall-through from this generated code.
if (FLAG_debug_code) {
- __ Abort(kUnexpectedFallThroughInBinaryStubGenerateFloatingPointCode);
+ __ Abort("Unexpected fall-through in "
+ "BinaryStub_GenerateFloatingPointCode.");
}
}
@@ -971,10 +984,10 @@
MacroAssembler* masm) {
// Push arguments, but ensure they are under the return address
// for a tail call.
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx);
__ push(rdx);
__ push(rax);
- __ PushReturnAddressFrom(rcx);
+ __ push(rcx);
}
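This pop/push bracket is the recurring tail-call idiom that the rollback
reverts to throughout the x64 files (PopReturnAddressTo and
PushReturnAddressFrom were the newer named wrappers for the same moves):
the return address comes off the stack, the arguments go underneath it,
and it goes back on top so the tail-called runtime function returns
directly to the stub's caller. A toy model of the stack discipline,
illustrative only:

    #include <vector>
    #include <cassert>

    int main() {
      std::vector<int> stack = {42};             // 42 plays the return address
      int ret = stack.back(); stack.pop_back();  // __ pop(rcx)
      stack.push_back(1);                        // __ push(rdx), left operand
      stack.push_back(2);                        // __ push(rax), right operand
      stack.push_back(ret);                      // __ push(rcx)
      assert(stack.back() == 42);  // return address on top for the tail call
      return 0;
    }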
@@ -2142,10 +2155,10 @@
__ JumpIfNotSmi(value, &miss);
// Prepare tail call to StoreIC_ArrayLength.
- __ PopReturnAddressTo(scratch);
+ __ pop(scratch);
__ push(receiver);
__ push(value);
- __ PushReturnAddressFrom(scratch);
+ __ push(scratch); // return address
ExternalReference ref =
ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate());
@@ -2211,9 +2224,9 @@
// Slow-case: Handle non-smi or out-of-bounds access to arguments
// by calling the runtime system.
__ bind(&slow);
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx); // Return address.
__ push(rdx);
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx);
__ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}
@@ -2603,9 +2616,9 @@
if (FLAG_debug_code) {
Condition is_smi = masm->CheckSmi(rax);
__ Check(NegateCondition(is_smi),
- kUnexpectedTypeForRegExpDataFixedArrayExpected);
+ "Unexpected type for RegExp data, FixedArray expected");
__ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
- __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
+ __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
}
// rax: RegExp data (FixedArray)
@@ -2971,7 +2984,7 @@
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ testb(rbx, Immediate(kIsIndirectStringMask));
- __ Assert(zero, kExternalStringExpectedButNotFound);
+ __ Assert(zero, "external string expected, but not found");
}
__ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
// Move the pointer so that offset-wise, it looks like a sequential string.
@@ -3435,7 +3448,7 @@
}
#ifdef DEBUG
- __ Abort(kUnexpectedFallThroughFromStringComparison);
+ __ Abort("Unexpected fall-through from string comparison");
#endif
__ bind(&check_unequal_objects);
@@ -3473,7 +3486,7 @@
}
// Push arguments below the return address to prepare jump to builtin.
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx);
__ push(rdx);
__ push(rax);
@@ -3486,7 +3499,8 @@
__ Push(Smi::FromInt(NegativeComparisonResult(cc)));
}
- __ PushReturnAddressFrom(rcx);
+ // Restore return address on the stack.
+ __ push(rcx);
// Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
// tagged as a small integer.
@@ -3655,9 +3669,9 @@
// Check for function proxy.
__ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
__ j(not_equal, &non_function);
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx);
__ push(rdi); // put proxy as additional argument under return address
- __ PushReturnAddressFrom(rcx);
+ __ push(rcx);
__ Set(rax, argc_ + 1);
__ Set(rbx, 0);
__ SetCallKind(rcx, CALL_AS_METHOD);
@@ -4261,7 +4275,7 @@
if (FLAG_debug_code) {
__ movl(rdi, Immediate(kWordBeforeMapCheckValue));
__ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
+ __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
}
__ movq(kScratchRegister,
Operand(kScratchRegister, kOffsetToMapCheckValue));
@@ -4303,7 +4317,7 @@
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
+ __ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
}
__ Set(rax, 0);
}
@@ -4326,7 +4340,7 @@
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
+ __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
}
}
__ ret(2 * kPointerSize + extra_stack_space);
@@ -4335,9 +4349,9 @@
__ bind(&slow);
if (HasCallSiteInlineCheck()) {
// Remove extra value from the stack.
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx);
__ pop(rax);
- __ PushReturnAddressFrom(rcx);
+ __ push(rcx);
}
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
}
@@ -4390,7 +4404,7 @@
void StringCharCodeAtGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
+ __ Abort("Unexpected fallthrough to CharCodeAt slow case");
Factory* factory = masm->isolate()->factory();
// Index is not a smi.
@@ -4440,7 +4454,7 @@
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
+ __ Abort("Unexpected fallthrough from CharCodeAt slow case");
}
@@ -4466,7 +4480,7 @@
void StringCharFromCodeGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
+ __ Abort("Unexpected fallthrough to CharFromCode slow case");
__ bind(&slow_case_);
call_helper.BeforeCall(masm);
@@ -4478,7 +4492,7 @@
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
+ __ Abort("Unexpected fallthrough from CharFromCode slow case");
}
@@ -4808,10 +4822,10 @@
void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm,
Register temp) {
- __ PopReturnAddressTo(temp);
+ __ pop(temp);
__ pop(rdx);
__ pop(rax);
- __ PushReturnAddressFrom(temp);
+ __ push(temp);
}
@@ -5026,7 +5040,7 @@
if (FLAG_debug_code) {
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
__ cmpq(kScratchRegister, candidate);
- __ Assert(equal, kOddballInStringTableIsNotUndefinedOrTheHole);
+ __ Assert(equal, "oddball in string table is not undefined or the hole");
}
__ jmp(&next_probe[i]);
@@ -5515,9 +5529,9 @@
// Inline comparison of ASCII strings.
__ IncrementCounter(counters->string_compare_native(), 1);
// Drop arguments from the stack
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx);
__ addq(rsp, Immediate(2 * kPointerSize));
- __ PushReturnAddressFrom(rcx);
+ __ push(rcx);
GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
// Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
@@ -5786,10 +5800,10 @@
// Handle more complex cases in runtime.
__ bind(&runtime);
- __ PopReturnAddressTo(tmp1);
+ __ pop(tmp1); // Return address.
__ push(left);
__ push(right);
- __ PushReturnAddressFrom(tmp1);
+ __ push(tmp1);
if (equality) {
__ TailCallRuntime(Runtime::kStringEquals, 2, 1);
} else {
@@ -6397,14 +6411,16 @@
// the runtime.
__ bind(&slow_elements);
- __ PopReturnAddressTo(rdi);
+ __ pop(rdi); // Pop the return address; it is pushed back below
+ // before the tail call.
__ push(rbx);
__ push(rcx);
__ push(rax);
__ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ push(rdx);
- __ PushReturnAddressFrom(rdi);
+ __ push(rdi); // Push the return address back so that the tail call
+ // returns to the right place.
__ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
// Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
@@ -6451,7 +6467,7 @@
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
__ movq(rbx, MemOperand(rbp, parameter_count_offset));
masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx);
int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
? kPointerSize
: 0;
@@ -6523,7 +6539,7 @@
}
// If we reached this point there is a problem.
- __ Abort(kUnexpectedElementsKindInArrayConstructor);
+ __ Abort("Unexpected ElementsKind in array constructor");
}
@@ -6586,7 +6602,7 @@
}
// If we reached this point there is a problem.
- __ Abort(kUnexpectedElementsKindInArrayConstructor);
+ __ Abort("Unexpected ElementsKind in array constructor");
}
@@ -6652,9 +6668,9 @@
// Will both indicate a NULL and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
- __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
+ __ Check(not_smi, "Unexpected initial map for Array function");
__ CmpObjectType(rcx, MAP_TYPE, rcx);
- __ Check(equal, kUnexpectedInitialMapForArrayFunction);
+ __ Check(equal, "Unexpected initial map for Array function");
// We should either have undefined in rbx or a valid cell
Label okay_here;
@@ -6662,7 +6678,7 @@
__ Cmp(rbx, undefined_sentinel);
__ j(equal, &okay_here);
__ Cmp(FieldOperand(rbx, 0), cell_map);
- __ Assert(equal, kExpectedPropertyCellInRegisterRbx);
+ __ Assert(equal, "Expected property cell in register rbx");
__ bind(&okay_here);
}
@@ -6767,9 +6783,9 @@
// Will both indicate a NULL and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
- __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
+ __ Check(not_smi, "Unexpected initial map for Array function");
__ CmpObjectType(rcx, MAP_TYPE, rcx);
- __ Check(equal, kUnexpectedInitialMapForArrayFunction);
+ __ Check(equal, "Unexpected initial map for Array function");
}
// Figure out the right elements kind
@@ -6788,7 +6804,7 @@
__ j(equal, &done);
__ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
__ Assert(equal,
- kInvalidElementsKindForInternalArrayOrInternalPackedArray);
+ "Invalid ElementsKind for InternalArray or InternalPackedArray");
__ bind(&done);
}
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index a39f14b..a823bf2 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -394,7 +394,7 @@
if (FLAG_debug_code) {
__ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
- __ Assert(equal, kObjectFoundInSmiOnlyArray);
+ __ Assert(equal, "object found in smi-only array");
}
__ movq(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), r15);
@@ -577,7 +577,7 @@
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ testb(result, Immediate(kIsIndirectStringMask));
- __ Assert(zero, kExternalStringExpectedButNotFound);
+ __ Assert(zero, "external string expected, but not found");
}
// Rule out short external strings.
STATIC_CHECK(kShortExternalStringTag != 0);
diff --git a/src/x64/debug-x64.cc b/src/x64/debug-x64.cc
index e6bc929..a337b0d 100644
--- a/src/x64/debug-x64.cc
+++ b/src/x64/debug-x64.cc
@@ -48,10 +48,11 @@
// CodeGenerator::VisitReturnStatement and VirtualFrame::Exit in codegen-x64.cc
// for the precise return instructions sequence.
void BreakLocationIterator::SetDebugBreakAtReturn() {
- ASSERT(Assembler::kJSReturnSequenceLength >= Assembler::kCallSequenceLength);
+ ASSERT(Assembler::kJSReturnSequenceLength >=
+ Assembler::kCallInstructionLength);
rinfo()->PatchCodeWithCall(
Isolate::Current()->debug()->debug_break_return()->entry(),
- Assembler::kJSReturnSequenceLength - Assembler::kCallSequenceLength);
+ Assembler::kJSReturnSequenceLength - Assembler::kCallInstructionLength);
}
@@ -81,7 +82,7 @@
ASSERT(IsDebugBreakSlot());
rinfo()->PatchCodeWithCall(
Isolate::Current()->debug()->debug_break_slot()->entry(),
- Assembler::kDebugBreakSlotLength - Assembler::kCallSequenceLength);
+ Assembler::kDebugBreakSlotLength - Assembler::kCallInstructionLength);
}
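Both patch sites pass "slot length minus call length" to PatchCodeWithCall
as the guard-byte count; with the restored 13-byte constants the
difference is zero, so the call exactly fills the slot and no int3 guards
are appended. Sketched as compile-time checks (not patch code):

    constexpr int kCallInstructionLength = 13;
    constexpr int kJSReturnSequenceLength = 13;
    constexpr int kDebugBreakSlotLength = kCallInstructionLength;

    static_assert(kJSReturnSequenceLength - kCallInstructionLength == 0,
                  "return-site patch needs no guard int3 bytes");
    static_assert(kDebugBreakSlotLength - kCallInstructionLength == 0,
                  "debug-break slot patch needs no guard int3 bytes");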
diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc
index e9cf567..b45e966 100644
--- a/src/x64/deoptimizer-x64.cc
+++ b/src/x64/deoptimizer-x64.cc
@@ -42,7 +42,7 @@
int Deoptimizer::patch_size() {
- return Assembler::kCallSequenceLength;
+ return Assembler::kCallInstructionLength;
}
@@ -69,7 +69,7 @@
Address call_address = instruction_start + deopt_data->Pc(i)->value();
// There is room enough to write a long call instruction because we pad
// LLazyBailout instructions with nops if necessary.
- CodePatcher patcher(call_address, Assembler::kCallSequenceLength);
+ CodePatcher patcher(call_address, Assembler::kCallInstructionLength);
patcher.masm()->Call(GetDeoptimizationEntry(isolate, i, LAZY),
RelocInfo::NONE64);
ASSERT(prev_call_address == NULL ||
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 6333e87..bac4e79 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -753,9 +753,9 @@
// Check that we're not inside a with or catch context.
__ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
__ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
- __ Check(not_equal, kDeclarationInWithContext);
+ __ Check(not_equal, "Declaration in with context.");
__ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
- __ Check(not_equal, kDeclarationInCatchContext);
+ __ Check(not_equal, "Declaration in catch context.");
}
}
@@ -2192,7 +2192,7 @@
__ Push(Smi::FromInt(resume_mode));
__ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
// Not reached: the runtime call returns elsewhere.
- __ Abort(kGeneratorFailedToResume);
+ __ Abort("Generator failed to resume.");
// Throw error if we attempt to operate on a running generator.
__ bind(&wrong_state);
@@ -2456,7 +2456,7 @@
// Check for an uninitialized let binding.
__ movq(rdx, location);
__ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
- __ Check(equal, kLetBindingReInitialization);
+ __ Check(equal, "Let binding re-initialization.");
}
// Perform the assignment.
__ movq(location, rax);
@@ -3398,14 +3398,14 @@
Register index,
Register value,
uint32_t encoding_mask) {
- __ Check(masm()->CheckSmi(index), kNonSmiIndex);
- __ Check(masm()->CheckSmi(value), kNonSmiValue);
+ __ Check(masm()->CheckSmi(index), "Non-smi index");
+ __ Check(masm()->CheckSmi(value), "Non-smi value");
__ SmiCompare(index, FieldOperand(string, String::kLengthOffset));
- __ Check(less, kIndexIsTooLarge);
+ __ Check(less, "Index is too large");
__ SmiCompare(index, Smi::FromInt(0));
- __ Check(greater_equal, kIndexIsNegative);
+ __ Check(greater_equal, "Index is negative");
__ push(value);
__ movq(value, FieldOperand(string, HeapObject::kMapOffset));
@@ -3413,7 +3413,7 @@
__ andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
__ cmpq(value, Immediate(encoding_mask));
- __ Check(equal, kUnexpectedStringType);
+ __ Check(equal, "Unexpected string type");
__ pop(value);
}
@@ -3777,7 +3777,7 @@
Handle<FixedArray> jsfunction_result_caches(
isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
- __ Abort(kAttemptToUseUndefinedCache);
+ __ Abort("Attempt to use undefined cache.");
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
context()->Plug(rax);
return;
@@ -3971,7 +3971,7 @@
// scratch, string_length(int32), elements(FixedArray*).
if (generate_debug_code_) {
__ cmpq(index, array_length);
- __ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
+ __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
}
__ bind(&loop);
__ movq(string, FieldOperand(elements,
@@ -4335,12 +4335,35 @@
break;
}
+ case Token::SUB:
+ EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
+ break;
+
+ case Token::BIT_NOT:
+ EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
+ break;
+
default:
UNREACHABLE();
}
}
+void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
+ const char* comment) {
+ // TODO(svenpanne): Allowing format strings in Comment would be nice here...
+ Comment cmt(masm_, comment);
+ UnaryOpStub stub(expr->op());
+ // UnaryOpStub expects the argument to be in the
+ // accumulator register rax.
+ VisitForAccumulatorValue(expr->expression());
+ SetSourcePosition(expr->position());
+ CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
+ expr->UnaryOperationFeedbackId());
+ context()->Plug(rax);
+}
+
+
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
@@ -4796,7 +4819,7 @@
ASSERT(!result_register().is(rdx));
ASSERT(!result_register().is(rcx));
// Cook return address on top of stack (smi encoded Code* delta)
- __ PopReturnAddressTo(rdx);
+ __ pop(rdx);
__ Move(rcx, masm_->CodeObject());
__ subq(rdx, rcx);
__ Integer32ToSmi(rdx, rdx);
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index 4837b9a..6e238c7 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -570,10 +570,10 @@
__ j(not_zero, &slow);
// Everything is fine, call runtime.
- __ PopReturnAddressTo(rcx);
+ __ pop(rcx);
__ push(rdx); // receiver
__ push(rax); // key
- __ PushReturnAddressFrom(rcx);
+ __ push(rcx); // return address
// Perform tail call to the entry.
__ TailCallExternalReference(
@@ -1369,10 +1369,10 @@
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->load_miss(), 1);
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx);
__ push(rax); // receiver
__ push(rcx); // name
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx); // return address
// Perform tail call to the entry.
ExternalReference ref =
@@ -1388,10 +1388,10 @@
// -- rsp[0] : return address
// -----------------------------------
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx);
__ push(rax); // receiver
__ push(rcx); // name
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx); // return address
// Perform tail call to the entry.
__ TailCallRuntime(Runtime::kGetProperty, 2, 1);
@@ -1408,10 +1408,10 @@
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->keyed_load_miss(), 1);
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx);
__ push(rdx); // receiver
__ push(rax); // name
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx); // return address
// Perform tail call to the entry.
ExternalReference ref = miss_mode == MISS_FORCE_GENERIC
@@ -1429,10 +1429,10 @@
// -- rsp[0] : return address
// -----------------------------------
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx);
__ push(rdx); // receiver
__ push(rax); // name
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx); // return address
// Perform tail call to the entry.
__ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
@@ -1468,11 +1468,11 @@
// -- rsp[0] : return address
// -----------------------------------
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx);
__ push(rdx); // receiver
__ push(rcx); // name
__ push(rax); // value
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx); // return address
// Perform tail call to the entry.
ExternalReference ref =
@@ -1512,13 +1512,13 @@
// -- rdx : receiver
// -- rsp[0] : return address
// -----------------------------------
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx);
__ push(rdx);
__ push(rcx);
__ push(rax);
__ Push(Smi::FromInt(NONE)); // PropertyAttributes
__ Push(Smi::FromInt(strict_mode));
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx); // return address
// Do tail-call to runtime routine.
__ TailCallRuntime(Runtime::kSetProperty, 5, 1);
@@ -1534,13 +1534,13 @@
// -- rsp[0] : return address
// -----------------------------------
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx);
__ push(rdx); // receiver
__ push(rcx); // key
__ push(rax); // value
__ Push(Smi::FromInt(NONE)); // PropertyAttributes
__ Push(Smi::FromInt(strict_mode)); // Strict mode.
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx); // return address
// Do tail-call to runtime routine.
__ TailCallRuntime(Runtime::kSetProperty, 5, 1);
@@ -1555,11 +1555,11 @@
// -- rsp[0] : return address
// -----------------------------------
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx);
__ push(rdx); // receiver
__ push(rcx); // key
__ push(rax); // value
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx); // return address
// Do tail-call to runtime routine.
ExternalReference ref(IC_Utility(kStoreIC_Slow), masm->isolate());
@@ -1575,11 +1575,11 @@
// -- rsp[0] : return address
// -----------------------------------
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx);
__ push(rdx); // receiver
__ push(rcx); // key
__ push(rax); // value
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx); // return address
// Do tail-call to runtime routine.
ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
@@ -1595,11 +1595,11 @@
// -- rsp[0] : return address
// -----------------------------------
- __ PopReturnAddressTo(rbx);
+ __ pop(rbx);
__ push(rdx); // receiver
__ push(rcx); // key
__ push(rax); // value
- __ PushReturnAddressFrom(rbx);
+ __ push(rbx); // return address
// Do tail-call to runtime routine.
ExternalReference ref = miss_mode == MISS_FORCE_GENERIC
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index d4c125b..f84c357 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -96,7 +96,7 @@
}
-void LChunkBuilder::Abort(BailoutReason reason) {
+void LChunkBuilder::Abort(const char* reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -661,7 +661,7 @@
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
- Abort(kBailoutWasNotPrepared);
+ Abort("bailout was not prepared");
return;
}
@@ -1270,7 +1270,7 @@
bool can_overflow =
instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
if (right->IsConstantOperand()) {
- int32_t right_value = ToInteger32(LConstantOperand::cast(right));
+ int right_value = ToInteger32(LConstantOperand::cast(right));
if (right_value == -1) {
__ negl(left);
} else if (right_value == 0) {
@@ -1362,7 +1362,7 @@
ASSERT(left->IsRegister());
if (right->IsConstantOperand()) {
- int32_t right_operand = ToInteger32(LConstantOperand::cast(right));
+ int right_operand = ToInteger32(LConstantOperand::cast(right));
switch (instr->op()) {
case Token::BIT_AND:
__ andl(ToRegister(left), Immediate(right_operand));
@@ -1371,11 +1371,7 @@
__ orl(ToRegister(left), Immediate(right_operand));
break;
case Token::BIT_XOR:
- if (right_operand == int32_t(~0)) {
- __ not_(ToRegister(left));
- } else {
- __ xorl(ToRegister(left), Immediate(right_operand));
- }
+ __ xorl(ToRegister(left), Immediate(right_operand));
break;
default:
UNREACHABLE();
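The dropped branch emitted notl instead of xorl when the immediate was all
ones; removing it changes only the encoding, not the result, since XOR
with an all-ones mask is bitwise NOT. A self-contained check:

    #include <cstdint>
    #include <cassert>

    int main() {
      const std::int32_t samples[] = {0, 1, -1, 0x7fffffff,
                                      static_cast<std::int32_t>(0x80000000)};
      for (std::int32_t v : samples) {
        // xorl reg, -1 computes the same value as notl reg.
        assert((v ^ ~std::int32_t{0}) == ~v);
      }
      return 0;
    }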
@@ -1446,7 +1442,7 @@
break;
}
} else {
- int32_t value = ToInteger32(LConstantOperand::cast(right));
+ int value = ToInteger32(LConstantOperand::cast(right));
uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
switch (instr->op()) {
case Token::ROR:
@@ -1646,7 +1642,7 @@
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ cmpq(value, Immediate(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
- __ Check(equal, kUnexpectedStringType);
+ __ Check(equal, "Unexpected string type");
__ pop(value);
}
@@ -1660,6 +1656,13 @@
}
+void LCodeGen::DoBitNotI(LBitNotI* instr) {
+ LOperand* input = instr->value();
+ ASSERT(input->Equals(instr->result()));
+ __ not_(ToRegister(input));
+}
+
+
void LCodeGen::DoThrow(LThrow* instr) {
__ push(ToRegister(instr->value()));
CallRuntime(Runtime::kThrow, 1, instr);
@@ -2565,7 +2568,7 @@
// The argument count parameter is a smi
__ SmiToInteger32(reg, reg);
Register return_addr_reg = reg.is(rcx) ? rbx : rcx;
- __ PopReturnAddressTo(return_addr_reg);
+ __ pop(return_addr_reg);
__ shl(reg, Immediate(kPointerSizeLog2));
__ addq(rsp, reg);
__ jmp(return_addr_reg);
@@ -2895,8 +2898,8 @@
if (instr->length()->IsConstantOperand() &&
instr->index()->IsConstantOperand()) {
- int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index()));
- int32_t const_length = ToInteger32(LConstantOperand::cast(instr->length()));
+ int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
+ int const_length = ToInteger32(LConstantOperand::cast(instr->length()));
int index = (const_length - const_index) + 1;
__ movq(result, Operand(arguments, index * kPointerSize));
} else {
@@ -3083,9 +3086,9 @@
Register elements_pointer_reg = ToRegister(elements_pointer);
int shift_size = ElementsKindToShiftSize(elements_kind);
if (key->IsConstantOperand()) {
- int32_t constant_value = ToInteger32(LConstantOperand::cast(key));
+ int constant_value = ToInteger32(LConstantOperand::cast(key));
if (constant_value & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big");
}
return Operand(elements_pointer_reg,
((constant_value + additional_index) << shift_size)
@@ -3423,17 +3426,6 @@
}
-void LCodeGen::EmitInteger64MathAbs(LMathAbs* instr) {
- Register input_reg = ToRegister(instr->value());
- __ testq(input_reg, input_reg);
- Label is_positive;
- __ j(not_sign, &is_positive, Label::kNear);
- __ neg(input_reg); // Sets flags.
- DeoptimizeIf(negative, instr->environment());
- __ bind(&is_positive);
-}
-
-
void LCodeGen::DoMathAbs(LMathAbs* instr) {
// Class for deferred case.
class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
@@ -3459,8 +3451,6 @@
__ andpd(input_reg, scratch);
} else if (r.IsInteger32()) {
EmitIntegerMathAbs(instr);
- } else if (r.IsSmi()) {
- EmitInteger64MathAbs(instr);
} else { // Tagged case.
DeferredMathAbsTaggedHeapNumber* deferred =
new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
@@ -4093,7 +4083,7 @@
__ AssertZeroExtended(reg);
}
if (instr->index()->IsConstantOperand()) {
- int32_t constant_index =
+ int constant_index =
ToInteger32(LConstantOperand::cast(instr->index()));
if (instr->hydrogen()->length()->representation().IsSmi()) {
__ Cmp(reg, Smi::FromInt(constant_index));
@@ -4110,7 +4100,7 @@
} else {
Operand length = ToOperand(instr->length());
if (instr->index()->IsConstantOperand()) {
- int32_t constant_index =
+ int constant_index =
ToInteger32(LConstantOperand::cast(instr->index()));
if (instr->hydrogen()->length()->representation().IsSmi()) {
__ Cmp(length, Smi::FromInt(constant_index));
@@ -4397,7 +4387,7 @@
// DoStringCharCodeAt above.
STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
if (instr->index()->IsConstantOperand()) {
- int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index()));
+ int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
__ Push(Smi::FromInt(const_index));
} else {
Register index = ToRegister(instr->index());
@@ -4971,64 +4961,31 @@
}
-void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
- {
- PushSafepointRegistersScope scope(this);
- __ push(object);
- CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
- __ testq(rax, Immediate(kSmiTagMask));
- }
- DeoptimizeIf(zero, instr->environment());
+void LCodeGen::DoCheckMapCommon(Register reg,
+ Handle<Map> map,
+ LInstruction* instr) {
+ Label success;
+ __ CompareMap(reg, map, &success);
+ DeoptimizeIf(not_equal, instr->environment());
+ __ bind(&success);
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
- class DeferredCheckMaps: public LDeferredCode {
- public:
- DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
- : LDeferredCode(codegen), instr_(instr), object_(object) {
- SetExit(check_maps());
- }
- virtual void Generate() {
- codegen()->DoDeferredInstanceMigration(instr_, object_);
- }
- Label* check_maps() { return &check_maps_; }
- virtual LInstruction* instr() { return instr_; }
- private:
- LCheckMaps* instr_;
- Label check_maps_;
- Register object_;
- };
-
if (instr->hydrogen()->CanOmitMapChecks()) return;
-
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- SmallMapList* map_set = instr->hydrogen()->map_set();
-
- DeferredCheckMaps* deferred = NULL;
- if (instr->hydrogen()->has_migration_target()) {
- deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
- __ bind(deferred->check_maps());
- }
-
Label success;
+ SmallMapList* map_set = instr->hydrogen()->map_set();
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMap(reg, map, &success);
__ j(equal, &success);
}
-
Handle<Map> map = map_set->last();
- __ CompareMap(reg, map, &success);
- if (instr->hydrogen()->has_migration_target()) {
- __ j(not_equal, deferred->entry());
- } else {
- DeoptimizeIf(not_equal, instr->environment());
- }
-
+ DoCheckMapCommon(reg, map, instr);
__ bind(&success);
}
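The restored DoCheckMaps tries each allowed map in turn and branches to
success on a hit; only the final comparison can deoptimize, via
DoCheckMapCommon. The control flow, modeled in plain C++ (a sketch with
assumed types, not the emitted code, which jumps rather than returns):

    #include <cstddef>
    #include <vector>

    bool CheckMaps(int object_map, const std::vector<int>& map_set) {
      for (std::size_t i = 0; i + 1 < map_set.size(); ++i) {
        if (object_map == map_set[i]) return true;  // __ j(equal, &success)
      }
      // The last map goes through DoCheckMapCommon: mismatch means deopt.
      return object_map == map_set.back();
    }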
@@ -5082,6 +5039,22 @@
}
+void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
+ if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
+ Register reg = ToRegister(instr->temp());
+
+ ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
+ ZoneList<Handle<Map> >* maps = instr->maps();
+
+ ASSERT(prototypes->length() == maps->length());
+
+ for (int i = 0; i < prototypes->length(); i++) {
+ __ LoadHeapObject(reg, prototypes->at(i));
+ DoCheckMapCommon(reg, maps->at(i), instr);
+ }
+}
+
+
void LCodeGen::DoAllocate(LAllocate* instr) {
class DeferredAllocate: public LDeferredCode {
public:
diff --git a/src/x64/lithium-codegen-x64.h b/src/x64/lithium-codegen-x64.h
index e134229..4286d07 100644
--- a/src/x64/lithium-codegen-x64.h
+++ b/src/x64/lithium-codegen-x64.h
@@ -102,6 +102,7 @@
XMMRegister ToDoubleRegister(LOperand* op) const;
bool IsInteger32Constant(LConstantOperand* op) const;
bool IsSmiConstant(LConstantOperand* op) const;
+ int ToRepresentation(LConstantOperand* op, const Representation& r) const;
int32_t ToInteger32(LConstantOperand* op) const;
Smi* ToSmi(LConstantOperand* op) const;
double ToDouble(LConstantOperand* op) const;
@@ -131,7 +132,8 @@
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
- void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
+
+ void DoCheckMapCommon(Register reg, Handle<Map> map, LInstruction* instr);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -177,7 +179,7 @@
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- void Abort(BailoutReason reason);
+ void Abort(const char* reason);
void FPRINTF_CHECKING Comment(const char* format, ...);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
@@ -266,7 +268,6 @@
uint32_t additional_index = 0);
void EmitIntegerMathAbs(LMathAbs* instr);
- void EmitInteger64MathAbs(LMathAbs* instr);
// Support for recording safepoint and position information.
void RecordSafepoint(LPointerMap* pointers,
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index 913e170..e403165 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -443,7 +443,7 @@
}
-void LCodeGen::Abort(BailoutReason reason) {
+void LCodeGen::Abort(const char* reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -654,7 +654,7 @@
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
- Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
+ Abort("Out of virtual registers while trying to allocate temp register.");
vreg = 0;
}
operand->set_virtual_register(vreg);
@@ -1321,6 +1321,16 @@
}
+LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
+ ASSERT(instr->value()->representation().IsInteger32());
+ ASSERT(instr->representation().IsInteger32());
+ if (instr->HasNoUses()) return NULL;
+ LOperand* input = UseRegisterAtStart(instr->value());
+ LBitNotI* result = new(zone()) LBitNotI(input);
+ return DefineSameAsFirst(result);
+}
+
+
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::DIV, instr);
@@ -1735,6 +1745,17 @@
}
+LInstruction* LChunkBuilder::DoNumericConstraint(HNumericConstraint* instr) {
+ return NULL;
+}
+
+
+LInstruction* LChunkBuilder::DoInductionVariableAnnotation(
+ HInductionVariableAnnotation* instr) {
+ return NULL;
+}
+
+
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
LOperand* value = UseRegisterOrConstantAtStart(instr->index());
LOperand* length = Use(instr->length());
@@ -1899,6 +1920,15 @@
}
+LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
+ LUnallocated* temp = NULL;
+ if (!instr->CanOmitPrototypeChecks()) temp = TempRegister();
+ LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp);
+ if (instr->CanOmitPrototypeChecks()) return result;
+ return AssignEnvironment(result);
+}
+
+
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
return AssignEnvironment(new(zone()) LCheckFunction(value));
@@ -1907,16 +1937,10 @@
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) {
- value = UseRegisterAtStart(instr->value());
- if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
- }
+ if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
LCheckMaps* result = new(zone()) LCheckMaps(value);
- if (!instr->CanOmitMapChecks()) {
- AssignEnvironment(result);
- if (instr->has_migration_target()) return AssignPointerMap(result);
- }
- return result;
+ if (instr->CanOmitMapChecks()) return result;
+ return AssignEnvironment(result);
}
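Both restored builders share this omit-aware shape: operands and the deopt
environment are requested only when the check will really be emitted, so
an omittable check costs nothing downstream. The decision, condensed into
a sketch with assumed types:

    enum class Need { kNothing, kEnvironmentForDeopt };

    Need PlanMapCheck(bool can_omit_map_checks) {
      // When the check can be omitted, no input value is used and no
      // environment is assigned; the instruction is effectively a no-op.
      return can_omit_map_checks ? Need::kNothing
                                 : Need::kEnvironmentForDeopt;
    }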
@@ -2209,7 +2233,7 @@
bool is_external_location = instr->access().IsExternalMemory() &&
instr->access().offset() == 0;
bool needs_write_barrier = instr->NeedsWriteBarrier();
- bool needs_write_barrier_for_map = instr->has_transition() &&
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
instr->NeedsWriteBarrierForMap();
LOperand* obj;
@@ -2344,7 +2368,7 @@
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
- Abort(kTooManySpillSlotsNeededForOSR);
+ Abort("Too many spill slots needed for OSR");
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h
index c3b9db4..31e5437 100644
--- a/src/x64/lithium-x64.h
+++ b/src/x64/lithium-x64.h
@@ -50,6 +50,7 @@
V(ArithmeticD) \
V(ArithmeticT) \
V(BitI) \
+ V(BitNotI) \
V(BoundsCheck) \
V(Branch) \
V(CallConstantFunction) \
@@ -67,6 +68,7 @@
V(CheckMaps) \
V(CheckMapValue) \
V(CheckNonSmi) \
+ V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampDToUint8) \
V(ClampIToUint8) \
@@ -1310,6 +1312,18 @@
};
+class LBitNotI: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LBitNotI(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
+};
+
+
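The template arguments encode the instruction's register shape: one
result, one input, zero temps, which is what lets DoBitNot use
DefineSameAsFirst for an in-place notl. A simplified stand-in for the
shape parameters (assumed, not the real LTemplateInstruction):

    template <int R, int I, int T>
    struct LShape {
      static constexpr int kResults = R;  // values the instruction defines
      static constexpr int kInputs = I;   // operands it consumes
      static constexpr int kTemps = T;    // scratch registers it needs
    };
    using BitNotShape = LShape<1, 1, 0>;
    static_assert(BitNotShape::kInputs == 1,
                  "single input, negated in place");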
class LAddI: public LTemplateInstruction<1, 2, 0> {
public:
LAddI(LOperand* left, LOperand* right) {
@@ -2052,7 +2066,7 @@
virtual void PrintDataTo(StringStream* stream);
- Handle<Map> transition() const { return hydrogen()->transition_map(); }
+ Handle<Map> transition() const { return hydrogen()->transition(); }
Representation representation() const {
return hydrogen()->field_representation();
}
@@ -2248,6 +2262,24 @@
};
+class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 1> {
+ public:
+ explicit LCheckPrototypeMaps(LOperand* temp) {
+ temps_[0] = temp;
+ }
+
+ LOperand* temp() { return temps_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
+
+ ZoneList<Handle<JSObject> >* prototypes() const {
+ return hydrogen()->prototypes();
+ }
+ ZoneList<Handle<Map> >* maps() const { return hydrogen()->maps(); }
+};
+
+
class LCheckSmi: public LTemplateInstruction<1, 1, 0> {
public:
explicit LCheckSmi(LOperand* value) {
@@ -2541,7 +2573,7 @@
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(BailoutReason reason);
+ void Abort(const char* reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 9c9b162..13d7dda 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -155,7 +155,7 @@
}
}
// Size of movq(destination, src);
- return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
+ return 10;
}
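The hard-coded 10 is the encoded size of movq(register, imm64) on x64: a
REX prefix, the B8+r opcode, and the full 8-byte immediate. As
compile-time arithmetic (a sketch, not patch code):

    constexpr int kRexPrefix = 1;    // REX.W (plus REX.B for r8..r15)
    constexpr int kOpcode = 1;       // B8+r, "mov r64, imm64"
    constexpr int kImmediate64 = 8;  // the absolute address
    static_assert(kRexPrefix + kOpcode + kImmediate64 == 10,
                  "movq(reg, imm64) is 10 bytes");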
@@ -449,8 +449,8 @@
}
-void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
- if (emit_debug_code()) Check(cc, reason);
+void MacroAssembler::Assert(Condition cc, const char* msg) {
+ if (emit_debug_code()) Check(cc, msg);
}
@@ -466,16 +466,16 @@
CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
Heap::kFixedCOWArrayMapRootIndex);
j(equal, &ok, Label::kNear);
- Abort(kJSObjectWithFastElementsMapHasSlowElements);
+ Abort("JSObject with fast elements map has slow elements");
bind(&ok);
}
}
-void MacroAssembler::Check(Condition cc, BailoutReason reason) {
+void MacroAssembler::Check(Condition cc, const char* msg) {
Label L;
j(cc, &L, Label::kNear);
- Abort(reason);
+ Abort(msg);
// Control will not return here.
bind(&L);
}
@@ -508,13 +508,12 @@
}
-void MacroAssembler::Abort(BailoutReason reason) {
+void MacroAssembler::Abort(const char* msg) {
// We want to pass the msg string like a smi to avoid GC
// problems; however, msg is not guaranteed to be aligned
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
- const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
// Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
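The comment above is pure bit arithmetic: clearing the low tag bit makes
the GC treat the pointer as a smi, and the cleared bit travels separately
as a genuine smi so the callee can rebuild the original address. A worked
sketch under the stated tag values (kSmiTag == 0, kSmiTagMask == 1 on x64
V8 of this era):

    #include <cstdint>
    #include <cassert>

    int main() {
      const std::intptr_t kSmiTag = 0;
      const std::intptr_t kSmiTagMask = 1;
      std::intptr_t p1 = 0x7fff12345677;  // possibly misaligned msg pointer
      std::intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;  // low bit cleared
      std::intptr_t delta = p1 - p0;      // 0 or 1, itself smi-encodable
      assert(p1 == p0 + delta);           // callee reconstructs the pointer
      return 0;
    }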
@@ -839,7 +838,7 @@
CompareRoot(return_value, Heap::kNullValueRootIndex);
j(equal, &ok, Label::kNear);
- Abort(kAPICallReturnedInvalidObject);
+ Abort("API call returned invalid object");
bind(&ok);
#endif
@@ -1039,7 +1038,7 @@
RelocInfo::NONE64);
cmpq(dst, kSmiConstantRegister);
if (allow_stub_calls()) {
- Assert(equal, kUninitializedKSmiConstantRegister);
+ Assert(equal, "Uninitialized kSmiConstantRegister");
} else {
Label ok;
j(equal, &ok, Label::kNear);
@@ -1107,7 +1106,7 @@
Label ok;
j(zero, &ok, Label::kNear);
if (allow_stub_calls()) {
- Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
+ Abort("Integer32ToSmiField writing to non-smi location");
} else {
int3();
}
@@ -1690,12 +1689,12 @@
if (emit_debug_code()) {
movq(kScratchRegister, src1);
addq(kScratchRegister, src2);
- Check(no_overflow, kSmiAdditionOverflow);
+ Check(no_overflow, "Smi addition overflow");
}
lea(dst, Operand(src1, src2, times_1, 0));
} else {
addq(dst, src2);
- Assert(no_overflow, kSmiAdditionOverflow);
+ Assert(no_overflow, "Smi addition overflow");
}
}
@@ -1727,7 +1726,7 @@
movq(dst, src1);
}
subq(dst, src2);
- Assert(no_overflow, kSmiSubtractionOverflow);
+ Assert(no_overflow, "Smi subtraction overflow");
}
@@ -1759,7 +1758,7 @@
movq(dst, src1);
}
subq(dst, src2);
- Assert(no_overflow, kSmiSubtractionOverflow);
+ Assert(no_overflow, "Smi subtraction overflow");
}
@@ -2156,7 +2155,7 @@
#ifdef DEBUG
if (allow_stub_calls()) { // Check contains a stub call.
Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
- Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
+ Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
}
#endif
STATIC_ASSERT(kSmiTag == 0);
@@ -2511,8 +2510,8 @@
int MacroAssembler::CallSize(ExternalReference ext) {
// Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
- return LoadAddressSize(ext) +
- Assembler::kCallScratchRegisterInstructionLength;
+ const int kCallInstructionSize = 3;
+ return LoadAddressSize(ext) + kCallInstructionSize;
}
@@ -2799,9 +2798,9 @@
if (is_uint16(bytes_dropped)) {
ret(bytes_dropped);
} else {
- PopReturnAddressTo(scratch);
+ pop(scratch);
addq(rsp, Immediate(bytes_dropped));
- PushReturnAddressFrom(scratch);
+ push(scratch);
ret(0);
}
}
@@ -2985,7 +2984,7 @@
XMMRegister scratch) {
if (FLAG_debug_code) {
cmpq(src, Immediate(0xffffffff));
- Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
+ Assert(below_equal, "input GPR is expected to have upper32 cleared");
}
cvtqsi2sd(dst, src);
}
@@ -3034,7 +3033,7 @@
j(is_smi, &ok, Label::kNear);
Cmp(FieldOperand(object, HeapObject::kMapOffset),
isolate()->factory()->heap_number_map());
- Check(equal, kOperandIsNotANumber);
+ Check(equal, "Operand is not a number");
bind(&ok);
}
}
@@ -3043,7 +3042,7 @@
void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
- Check(NegateCondition(is_smi), kOperandIsASmi);
+ Check(NegateCondition(is_smi), "Operand is a smi");
}
}
@@ -3051,7 +3050,7 @@
void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
- Check(is_smi, kOperandIsNotASmi);
+ Check(is_smi, "Operand is not a smi");
}
}
@@ -3059,7 +3058,7 @@
void MacroAssembler::AssertSmi(const Operand& object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
- Check(is_smi, kOperandIsNotASmi);
+ Check(is_smi, "Operand is not a smi");
}
}
@@ -3069,7 +3068,7 @@
ASSERT(!int32_register.is(kScratchRegister));
movq(kScratchRegister, 0x100000000l, RelocInfo::NONE64);
cmpq(kScratchRegister, int32_register);
- Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
+ Check(above_equal, "32 bit value in register is not zero-extended");
}
}
@@ -3077,12 +3076,12 @@
void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
testb(object, Immediate(kSmiTagMask));
- Check(not_equal, kOperandIsASmiAndNotAString);
+ Check(not_equal, "Operand is a smi and not a string");
push(object);
movq(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, FIRST_NONSTRING_TYPE);
pop(object);
- Check(below, kOperandIsNotAString);
+ Check(below, "Operand is not a string");
}
}
@@ -3090,24 +3089,24 @@
void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
testb(object, Immediate(kSmiTagMask));
- Check(not_equal, kOperandIsASmiAndNotAName);
+ Check(not_equal, "Operand is a smi and not a name");
push(object);
movq(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, LAST_NAME_TYPE);
pop(object);
- Check(below_equal, kOperandIsNotAName);
+ Check(below_equal, "Operand is not a name");
}
}
void MacroAssembler::AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
- BailoutReason reason) {
+ const char* message) {
if (emit_debug_code()) {
ASSERT(!src.is(kScratchRegister));
LoadRoot(kScratchRegister, root_value_index);
cmpq(src, kScratchRegister);
- Check(equal, reason);
+ Check(equal, message);
}
}
@@ -3458,7 +3457,7 @@
isolate()->factory()->undefined_value(),
RelocInfo::EMBEDDED_OBJECT);
cmpq(Operand(rsp, 0), kScratchRegister);
- Check(not_equal, kCodeObjectNotProperlyPatched);
+ Check(not_equal, "code object not properly patched");
}
}
@@ -3467,7 +3466,7 @@
if (emit_debug_code()) {
Move(kScratchRegister, Smi::FromInt(type));
cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
- Check(equal, kStackFrameTypesMustMatch);
+ Check(equal, "stack frame types must match");
}
movq(rsp, rbp);
pop(rbp);
@@ -3568,7 +3567,8 @@
// from the caller stack.
lea(rsp, Operand(r15, 1 * kPointerSize));
- PushReturnAddressFrom(rcx);
+ // Push the return address to get ready to return.
+ push(rcx);
LeaveExitFrameEpilogue();
}
@@ -3612,7 +3612,7 @@
// When generating debug code, make sure the lexical context is set.
if (emit_debug_code()) {
cmpq(scratch, Immediate(0));
- Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
+ Check(not_equal, "we should not have an empty lexical context");
}
// Load the native context of the current context.
int offset =
@@ -3624,7 +3624,7 @@
if (emit_debug_code()) {
Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
- Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
}
// Check if both contexts are the same.
@@ -3643,12 +3643,12 @@
movq(holder_reg,
FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
CompareRoot(holder_reg, Heap::kNullValueRootIndex);
- Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);
+ Check(not_equal, "JSGlobalProxy::context() should not be null.");
// Read the first word and compare to native_context_map(),
movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
- Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
pop(holder_reg);
}
@@ -3794,7 +3794,7 @@
// Assert that result actually contains top on entry.
Operand top_operand = ExternalOperand(allocation_top);
cmpq(result, top_operand);
- Check(equal, kUnexpectedAllocationTop);
+ Check(equal, "Unexpected allocation top");
#endif
return;
}
@@ -3815,7 +3815,7 @@
AllocationFlags flags) {
if (emit_debug_code()) {
testq(result_end, Immediate(kObjectAlignmentMask));
- Check(zero, kUnalignedAllocationInNewSpace);
+ Check(zero, "Unaligned allocation in new space");
}
ExternalReference allocation_top =
@@ -3862,7 +3862,7 @@
// always safe because the limit of the heap is always aligned.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
testq(result, Immediate(kDoubleAlignmentMask));
- Check(zero, kAllocationIsNotDoubleAligned);
+ Check(zero, "Allocation is not double aligned");
}
// Calculate new top and bail out if new space is exhausted.
@@ -3941,7 +3941,7 @@
// always safe because the limit of the heap is always aligned.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
testq(result, Immediate(kDoubleAlignmentMask));
- Check(zero, kAllocationIsNotDoubleAligned);
+ Check(zero, "Allocation is not double aligned");
}
// Calculate new top and bail out if new space is exhausted.
@@ -3975,7 +3975,7 @@
Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
cmpq(object, top_operand);
- Check(below, kUndoAllocationOfNonAllocatedMemory);
+ Check(below, "Undo allocation of non allocated memory");
#endif
movq(top_operand, object);
}
@@ -4165,7 +4165,7 @@
ASSERT(min_length >= 0);
if (emit_debug_code()) {
cmpl(length, Immediate(min_length));
- Assert(greater_equal, kInvalidMinLength);
+ Assert(greater_equal, "Invalid min_length");
}
Label loop, done, short_string, short_loop;
@@ -4249,7 +4249,7 @@
if (emit_debug_code()) {
CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
Heap::kWithContextMapRootIndex);
- Check(not_equal, kVariableResolvedToWithContext);
+ Check(not_equal, "Variable resolved to with context.");
}
}
@@ -4340,7 +4340,7 @@
CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
jmp(&ok);
bind(&fail);
- Abort(kGlobalFunctionsMustHaveInitialMap);
+ Abort("Global functions must have initial map");
bind(&ok);
}
}
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 61abc20..e611c8a 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -823,10 +823,6 @@
void Drop(int stack_elements);
void Call(Label* target) { call(target); }
- void Push(Register src) { push(src); }
- void Pop(Register dst) { pop(dst); }
- void PushReturnAddressFrom(Register src) { push(src); }
- void PopReturnAddressTo(Register dst) { pop(dst); }
// Control Flow
void Jump(Address destination, RelocInfo::Mode rmode);
@@ -841,7 +837,7 @@
// The size of the code generated for different call instructions.
int CallSize(Address destination, RelocInfo::Mode rmode) {
- return kCallSequenceLength;
+ return kCallInstructionLength;
}
int CallSize(ExternalReference ext);
int CallSize(Handle<Code> code_object) {
@@ -1006,7 +1002,7 @@
// enabled via --debug-code.
void AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
- BailoutReason reason);
+ const char* message);
// ---------------------------------------------------------------------------
// Exception handling
@@ -1323,15 +1319,15 @@
// Calls Abort(msg) if the condition cc is not satisfied.
// Use --debug_code to enable.
- void Assert(Condition cc, BailoutReason reason);
+ void Assert(Condition cc, const char* msg);
void AssertFastElements(Register elements);
// Like Assert(), but always enabled.
- void Check(Condition cc, BailoutReason reason);
+ void Check(Condition cc, const char* msg);
// Print a message to stdout and abort execution.
- void Abort(BailoutReason msg);
+ void Abort(const char* msg);
// Check that the stack is aligned.
void CheckStackAlignment();
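
The comments above spell out the contract these declarations revert: Assert is live only under --debug-code, Check is always live, and both funnel failures into Abort. A host-side sketch of that relationship, using a plain bool for the condition and a flag variable standing in for --debug-code:

#include <cstdio>
#include <cstdlib>

static bool emit_debug_code = false;  // stands in for --debug-code

// Always enabled: aborts with the message if the condition fails.
static void Check(bool cc, const char* msg) {
  if (!cc) {
    std::fprintf(stderr, "abort: %s\n", msg);
    std::abort();
  }
}

// Only enabled under --debug-code; otherwise does nothing.
static void Assert(bool cc, const char* msg) {
  if (emit_debug_code) Check(cc, msg);
}

int main() {
  Assert(false, "never reached while debug code is off");
  Check(true, "condition holds, no abort");
  return 0;
}
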
diff --git a/src/x64/regexp-macro-assembler-x64.cc b/src/x64/regexp-macro-assembler-x64.cc
index dcd317c..106ffb7 100644
--- a/src/x64/regexp-macro-assembler-x64.cc
+++ b/src/x64/regexp-macro-assembler-x64.cc
@@ -397,7 +397,7 @@
// Fail on partial or illegal capture (start of capture after end of capture).
// This must not happen (no back-reference can reference a capture that wasn't
// closed before in the reg-exp).
- __ Check(greater_equal, kInvalidCaptureReferenced);
+ __ Check(greater_equal, "Invalid capture referenced");
// Succeed on empty capture (including non-participating capture)
__ j(equal, &fallthrough);
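
The restored message documents an Irregexp invariant: by the time a back-reference is matched, its capture registers either form a closed range (end >= start) or mark a capture that never participated. A host-side sketch, assuming the conventional encoding in which both registers of an unset capture hold -1; the struct and function names are illustrative:

#include <cassert>

struct Capture { int start; int end; };  // hypothetical register pair

static bool BackReferenceIsEmpty(const Capture& c) {
  int length = c.end - c.start;
  // Mirrors Check(greater_equal, ...): a negative length would mean a
  // partial or illegal capture, which the regexp compiler never produces.
  assert(length >= 0 && "Invalid capture referenced");
  return length == 0;  // empty and non-participating captures match trivially
}

int main() {
  Capture unset{-1, -1};  // non-participating: (-1) - (-1) == 0
  Capture closed{0, 3};   // a real three-character capture
  assert(BackReferenceIsEmpty(unset));
  assert(!BackReferenceIsEmpty(closed));
  return 0;
}
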
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 7ad250a..39ff656 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -830,11 +830,11 @@
object->map()->unused_property_fields() == 0) {
// The properties must be extended before we can store the value.
// We jump to a runtime call that extends the properties array.
- __ PopReturnAddressTo(scratch1);
+ __ pop(scratch1); // Return address.
__ push(receiver_reg);
__ Push(transition);
__ push(value_reg);
- __ PushReturnAddressFrom(scratch1);
+ __ push(scratch1);
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
masm->isolate()),
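
With the PopReturnAddressTo/PushReturnAddressFrom wrappers removed from macro-assembler-x64.h (above), every site in this file reverts to raw pop/push. The pattern is the same throughout: pop the return address into a scratch register, push the runtime call's arguments, then push the return address back on top, so the tail call returns directly to the original caller. A host-side sketch of the stack manipulation, modeling the stack as a vector with illustrative values:

#include <cassert>
#include <cstdint>
#include <vector>

int main() {
  // back() is the top of stack; initially it holds only the caller's
  // return address (value illustrative).
  std::vector<uintptr_t> stack = {0xCA11E4};

  uintptr_t scratch1 = stack.back();  // __ pop(scratch1): return address
  stack.pop_back();
  stack.push_back(0x01);              // __ push(receiver_reg)
  stack.push_back(0x02);              // __ Push(transition)
  stack.push_back(0x03);              // __ push(value_reg)
  stack.push_back(scratch1);          // __ push(scratch1): back on top

  // The runtime callee sees three arguments below the return address and,
  // on return, control goes straight back to the original caller.
  assert(stack.back() == 0xCA11E4);
  return 0;
}
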
@@ -1284,7 +1284,7 @@
Handle<ExecutableAccessorInfo> callback) {
// Insert additional parameters into the stack frame above return address.
ASSERT(!scratch4().is(reg));
- __ PopReturnAddressTo(scratch4());
+ __ pop(scratch4()); // Get return address to place it below.
__ push(receiver()); // receiver
__ push(reg); // holder
@@ -1324,7 +1324,7 @@
ASSERT(!name_arg.is(scratch4()));
__ movq(name_arg, rsp);
- __ PushReturnAddressFrom(scratch4());
+ __ push(scratch4()); // Restore return address.
// v8::Arguments::values_ and handler for name.
const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;
@@ -1444,10 +1444,10 @@
} else { // !compile_followup_inline
// Call the runtime system to load the interceptor.
// Check that the maps haven't changed.
- __ PopReturnAddressTo(scratch2());
+ __ pop(scratch2()); // Save old return address.
PushInterceptorArguments(masm(), receiver(), holder_reg,
this->name(), interceptor_holder);
- __ PushReturnAddressFrom(scratch2());
+ __ push(scratch2()); // Restore old return address.
ExternalReference ref = ExternalReference(
IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
@@ -2650,12 +2650,12 @@
HandlerFrontend(object, receiver(), holder, name, &success);
__ bind(&success);
- __ PopReturnAddressTo(scratch1());
+ __ pop(scratch1()); // Remove the return address.
__ push(receiver());
__ Push(callback); // callback info
__ Push(name);
__ push(value());
- __ PushReturnAddressFrom(scratch1());
+ __ push(scratch1()); // Restore return address.
// Do tail-call to the runtime system.
ExternalReference store_callback_property =
@@ -2717,12 +2717,12 @@
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
Handle<JSObject> object,
Handle<Name> name) {
- __ PopReturnAddressTo(scratch1());
+ __ pop(scratch1()); // Remove the return address.
__ push(receiver());
__ push(this->name());
__ push(value());
__ Push(Smi::FromInt(strict_mode()));
- __ PushReturnAddressFrom(scratch1());
+ __ push(scratch1()); // Restore return address.
// Do tail-call to the runtime system.
ExternalReference store_ic_property =
@@ -2938,7 +2938,7 @@
__ j(equal, &miss);
} else if (FLAG_debug_code) {
__ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
- __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
+ __ Check(not_equal, "DontDelete cells can't contain the hole");
}
HandlerFrontendFooter(name, &success, &miss);
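
The branch above separates configurable global properties, whose cells may hold the hole after deletion (hence the miss path), from DontDelete properties, whose cells never can; for the latter the hole test survives only as a --debug-code Check. A host-side sketch of that split, with illustrative names:

#include <cassert>

enum Value { kTheHole, kSomeValue };  // hole = deleted-property sentinel

static Value LoadGlobal(Value cell_contents, bool is_dont_delete,
                        bool* miss) {
  if (!is_dont_delete) {
    if (cell_contents == kTheHole) {  // deleted property: take the miss path
      *miss = true;
      return kTheHole;
    }
  } else {
    // Mirrors Check(not_equal, "DontDelete cells can't contain the hole"):
    // a non-deletable property's cell can never hold the sentinel.
    assert(cell_contents != kTheHole);
  }
  return cell_contents;
}

int main() {
  bool miss = false;
  assert(LoadGlobal(kSomeValue, true, &miss) == kSomeValue && !miss);
  LoadGlobal(kTheHole, false, &miss);
  assert(miss);
  return 0;
}
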
diff --git a/src/zone.h b/src/zone.h
index 1f14115..a12ed79 100644
--- a/src/zone.h
+++ b/src/zone.h
@@ -246,11 +246,6 @@
explicit ZoneSplayTree(Zone* zone)
: SplayTree<Config, ZoneAllocationPolicy>(ZoneAllocationPolicy(zone)) {}
~ZoneSplayTree();
-
- INLINE(void* operator new(size_t size, Zone* zone));
-
- void operator delete(void* pointer) { UNREACHABLE(); }
- void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
};
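
The dropped overloads are the standard zone placement-new pattern: operator new(size_t, Zone*) routes allocation into the zone, and the operator delete overloads exist only so that individual deletion is flagged as unreachable, since a zone reclaims its objects in bulk. A compilable sketch of the pattern with a toy arena standing in for v8's Zone (all names here are illustrative; the real UNREACHABLE() is modeled as abort):

#include <cassert>
#include <cstddef>
#include <cstdlib>
#include <vector>

// Toy arena: memory lives until the zone itself dies, so there is no
// per-object deallocation at all.
class Zone {
 public:
  void* New(size_t size) {
    chunks_.emplace_back(size);
    return chunks_.back().data();
  }
 private:
  std::vector<std::vector<char>> chunks_;
};

struct ZoneObject {
  // Placement-style operator new routing allocation into the zone; this is
  // the shape of the overload removed from ZoneSplayTree above.
  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
  // Individually deleting a zone object is a bug (UNREACHABLE in V8).
  void operator delete(void*) { std::abort(); }
  void operator delete(void*, Zone*) { std::abort(); }
  int value = 42;
};

int main() {
  Zone zone;
  ZoneObject* obj = new (&zone) ZoneObject();  // allocated in the zone
  assert(obj->value == 42);
  return 0;  // no delete: the zone reclaims everything at once
}
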