Merge V8 at branches/3.2 r8200: Initial merge by Git
Change-Id: I5c434306e98132997e9c5f6024b6ce200b255edf
diff --git a/src/SConscript b/src/SConscript
index 417e283..a68ee3e 100755
--- a/src/SConscript
+++ b/src/SConscript
@@ -297,11 +297,6 @@
'''.split()
-EXPERIMENTAL_LIBRARY_FILES = '''
-proxy.js
-'''.split()
-
-
def Abort(message):
print message
sys.exit(1)
@@ -329,16 +324,9 @@
# compile it.
library_files = [s for s in LIBRARY_FILES]
library_files.append('macros.py')
- libraries_src = env.JS2C(['libraries.cc'], library_files, TYPE='CORE')
+ libraries_src, libraries_empty_src = env.JS2C(['libraries.cc', 'libraries-empty.cc'], library_files, TYPE='CORE')
libraries_obj = context.ConfigureObject(env, libraries_src, CPPPATH=['.'])
- # Combine the experimental JavaScript library files into a C++ file
- # and compile it.
- experimental_library_files = [ s for s in EXPERIMENTAL_LIBRARY_FILES ]
- experimental_library_files.append('macros.py')
- experimental_libraries_src = env.JS2C(['experimental-libraries.cc'], experimental_library_files, TYPE='EXPERIMENTAL')
- experimental_libraries_obj = context.ConfigureObject(env, experimental_libraries_src, CPPPATH=['.'])
-
source_objs = context.ConfigureObject(env, source_files)
non_snapshot_files = [source_objs]
@@ -355,7 +343,7 @@
mksnapshot_env = env.Copy()
mksnapshot_env.Replace(**context.flags['mksnapshot'])
mksnapshot_src = 'mksnapshot.cc'
- mksnapshot = mksnapshot_env.Program('mksnapshot', [mksnapshot_src, libraries_obj, experimental_libraries_obj, non_snapshot_files, empty_snapshot_obj], PDB='mksnapshot.exe.pdb')
+ mksnapshot = mksnapshot_env.Program('mksnapshot', [mksnapshot_src, libraries_obj, non_snapshot_files, empty_snapshot_obj], PDB='mksnapshot.exe.pdb')
if context.use_snapshot:
if context.build_snapshot:
snapshot_cc = env.Snapshot('snapshot.cc', mksnapshot, LOGFILE=File('snapshot.log').abspath)
@@ -364,7 +352,7 @@
snapshot_obj = context.ConfigureObject(env, snapshot_cc, CPPPATH=['.'])
else:
snapshot_obj = empty_snapshot_obj
- library_objs = [non_snapshot_files, libraries_obj, experimental_libraries_obj, snapshot_obj]
+ library_objs = [non_snapshot_files, libraries_obj, snapshot_obj]
return (library_objs, d8_objs, [mksnapshot], preparser_objs)
diff --git a/src/api.cc b/src/api.cc
index c3684f7..247507f 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -3709,7 +3709,6 @@
// Create the environment.
env = isolate->bootstrapper()->CreateEnvironment(
- isolate,
Utils::OpenHandle(*global_object),
proxy_template,
extensions);
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index d66daea..8c147f9 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -1817,9 +1817,6 @@
case TRBinaryOpIC::ODDBALL:
GenerateOddballStub(masm);
break;
- case TRBinaryOpIC::BOTH_STRING:
- GenerateBothStringStub(masm);
- break;
case TRBinaryOpIC::STRING:
GenerateStringStub(masm);
break;
@@ -2260,36 +2257,6 @@
}
-void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
- Label call_runtime;
- ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING);
- ASSERT(op_ == Token::ADD);
- // If both arguments are strings, call the string add stub.
- // Otherwise, do a transition.
-
- // Registers containing left and right operands respectively.
- Register left = r1;
- Register right = r0;
-
- // Test if left operand is a string.
- __ JumpIfSmi(left, &call_runtime);
- __ CompareObjectType(left, r2, r2, FIRST_NONSTRING_TYPE);
- __ b(ge, &call_runtime);
-
- // Test if right operand is a string.
- __ JumpIfSmi(right, &call_runtime);
- __ CompareObjectType(right, r2, r2, FIRST_NONSTRING_TYPE);
- __ b(ge, &call_runtime);
-
- StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
- GenerateRegisterArgsPush(masm);
- __ TailCallStub(&string_add_stub);
-
- __ bind(&call_runtime);
- GenerateTypeTransition(masm);
-}
-
-
void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
ASSERT(operands_type_ == TRBinaryOpIC::INT32);
@@ -2440,6 +2407,8 @@
// Save the left value on the stack.
__ Push(r5, r4);
+ Label pop_and_call_runtime;
+
// Allocate a heap number to store the result.
heap_number_result = r5;
GenerateHeapResultAllocation(masm,
@@ -2447,7 +2416,7 @@
heap_number_map,
scratch1,
scratch2,
- &call_runtime);
+ &pop_and_call_runtime);
// Load the left value from the value saved on the stack.
__ Pop(r1, r0);
@@ -2458,6 +2427,10 @@
if (FLAG_debug_code) {
__ stop("Unreachable code.");
}
+
+ __ bind(&pop_and_call_runtime);
+ __ Drop(2);
+ __ b(&call_runtime);
}
break;
@@ -3468,11 +3441,20 @@
#ifdef ENABLE_LOGGING_AND_PROFILING
// If this is the outermost JS call, set js_entry_sp value.
+ Label non_outermost_js;
ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
__ mov(r5, Operand(ExternalReference(js_entry_sp)));
__ ldr(r6, MemOperand(r5));
- __ cmp(r6, Operand(0, RelocInfo::NONE));
- __ str(fp, MemOperand(r5), eq);
+ __ cmp(r6, Operand(0));
+ __ b(ne, &non_outermost_js);
+ __ str(fp, MemOperand(r5));
+ __ mov(ip, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
+ Label cont;
+ __ b(&cont);
+ __ bind(&non_outermost_js);
+ __ mov(ip, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
+ __ bind(&cont);
+ __ push(ip);
#endif
// Call a faked try-block that does the invoke.
@@ -3530,27 +3512,22 @@
__ mov(lr, Operand(pc));
masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
- // Unlink this frame from the handler chain. When reading the
- // address of the next handler, there is no need to use the address
- // displacement since the current stack pointer (sp) points directly
- // to the stack handler.
- __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset));
- __ mov(ip, Operand(ExternalReference(Isolate::k_handler_address, isolate)));
- __ str(r3, MemOperand(ip));
- // No need to restore registers
- __ add(sp, sp, Operand(StackHandlerConstants::kSize));
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
- // If current FP value is the same as js_entry_sp value, it means that
- // the current function is the outermost.
- __ mov(r5, Operand(ExternalReference(js_entry_sp)));
- __ ldr(r6, MemOperand(r5));
- __ cmp(fp, Operand(r6));
- __ mov(r6, Operand(0, RelocInfo::NONE), LeaveCC, eq);
- __ str(r6, MemOperand(r5), eq);
-#endif
+ // Unlink this frame from the handler chain.
+ __ PopTryHandler();
__ bind(&exit); // r0 holds result
+#ifdef ENABLE_LOGGING_AND_PROFILING
+ // Check if the current stack frame is marked as the outermost JS frame.
+ Label non_outermost_js_2;
+ __ pop(r5);
+ __ cmp(r5, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
+ __ b(ne, &non_outermost_js_2);
+ __ mov(r6, Operand(0));
+ __ mov(r5, Operand(ExternalReference(js_entry_sp)));
+ __ str(r6, MemOperand(r5));
+ __ bind(&non_outermost_js_2);
+#endif
+
// Restore the top frame descriptors from the stack.
__ pop(r3);
__ mov(ip,
@@ -3711,7 +3688,7 @@
__ b(ne, &slow);
// Null is not instance of anything.
- __ cmp(scratch, Operand(masm->isolate()->factory()->null_value()));
+ __ cmp(scratch, Operand(FACTORY->null_value()));
__ b(ne, &object_not_null);
__ mov(r0, Operand(Smi::FromInt(1)));
__ Ret(HasArgsInRegisters() ? 0 : 2);
@@ -4209,7 +4186,7 @@
__ bind(&failure);
// For failure and exception return null.
- __ mov(r0, Operand(masm->isolate()->factory()->null_value()));
+ __ mov(r0, Operand(FACTORY->null_value()));
__ add(sp, sp, Operand(4 * kPointerSize));
__ Ret();
@@ -4280,8 +4257,6 @@
const int kMaxInlineLength = 100;
Label slowcase;
Label done;
- Factory* factory = masm->isolate()->factory();
-
__ ldr(r1, MemOperand(sp, kPointerSize * 2));
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
@@ -4316,7 +4291,7 @@
// Interleave operations for better latency.
__ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
__ add(r3, r0, Operand(JSRegExpResult::kSize));
- __ mov(r4, Operand(factory->empty_fixed_array()));
+ __ mov(r4, Operand(FACTORY->empty_fixed_array()));
__ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
__ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
__ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX));
@@ -4337,13 +4312,13 @@
// r5: Number of elements in array, untagged.
// Set map.
- __ mov(r2, Operand(factory->fixed_array_map()));
+ __ mov(r2, Operand(FACTORY->fixed_array_map()));
__ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
// Set FixedArray length.
__ mov(r6, Operand(r5, LSL, kSmiTagSize));
__ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
// Fill contents of fixed-array with the-hole.
- __ mov(r2, Operand(factory->the_hole_value()));
+ __ mov(r2, Operand(FACTORY->the_hole_value()));
__ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
// Fill fixed array elements with hole.
// r0: JSArray, tagged.
diff --git a/src/arm/code-stubs-arm.h b/src/arm/code-stubs-arm.h
index 0bb0025..d82afc7 100644
--- a/src/arm/code-stubs-arm.h
+++ b/src/arm/code-stubs-arm.h
@@ -158,7 +158,6 @@
void GenerateHeapNumberStub(MacroAssembler* masm);
void GenerateOddballStub(MacroAssembler* masm);
void GenerateStringStub(MacroAssembler* masm);
- void GenerateBothStringStub(MacroAssembler* masm);
void GenerateGenericStub(MacroAssembler* masm);
void GenerateAddStrings(MacroAssembler* masm);
void GenerateCallRuntime(MacroAssembler* masm);
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 85e4262..871b453 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -3161,14 +3161,15 @@
void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
ASSERT(args->length() >= 2);
- int arg_count = args->length() - 2; // 2 ~ receiver and function.
- for (int i = 0; i < arg_count + 1; i++) {
- VisitForStackValue(args->at(i));
+ int arg_count = args->length() - 2; // For receiver and function.
+ VisitForStackValue(args->at(0)); // Receiver.
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i + 1));
}
- VisitForAccumulatorValue(args->last()); // Function.
+ VisitForAccumulatorValue(args->at(arg_count + 1)); // Function.
- // InvokeFunction requires the function in r1. Move it in there.
- __ mov(r1, result_register());
+ // InvokeFunction requires function in r1. Move it in there.
+ if (!result_register().is(r1)) __ mov(r1, result_register());
ParameterCount count(arg_count);
__ InvokeFunction(r1, count, CALL_FUNCTION);
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -4294,6 +4295,7 @@
default:
break;
}
+
__ Call(ic, mode);
}
@@ -4315,6 +4317,7 @@
default:
break;
}
+
__ Call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index db04f33..8acf7c2 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -926,6 +926,217 @@
__ TailCallExternalReference(ref, 2, 1);
}
+// Returns the code marker, or 0 if the code is not marked.
+static inline int InlinedICSiteMarker(Address address,
+ Address* inline_end_address) {
+ if (V8::UseCrankshaft()) return false;
+
+ // If the instruction after the call site is not the pseudo instruction nop1
+ // then this is not related to an inlined in-object property load. The nop1
+ // instruction is located just after the call to the IC in the deferred code
+ // handling the miss in the inlined code. After the nop1 instruction there is
+ // a branch instruction for jumping back from the deferred code.
+ Address address_after_call = address + Assembler::kCallTargetAddressOffset;
+ Instr instr_after_call = Assembler::instr_at(address_after_call);
+ int code_marker = MacroAssembler::GetCodeMarker(instr_after_call);
+
+ // A negative result means the code is not marked.
+ if (code_marker <= 0) return 0;
+
+ Address address_after_nop = address_after_call + Assembler::kInstrSize;
+ Instr instr_after_nop = Assembler::instr_at(address_after_nop);
+ // There may be some reg-reg move and frame merging code to skip over before
+ // the branch back from the DeferredReferenceGetKeyedValue code to the inlined
+ // code.
+ while (!Assembler::IsBranch(instr_after_nop)) {
+ address_after_nop += Assembler::kInstrSize;
+ instr_after_nop = Assembler::instr_at(address_after_nop);
+ }
+
+ // Find the end of the inlined code for handling the load.
+ int b_offset =
+ Assembler::GetBranchOffset(instr_after_nop) + Assembler::kPcLoadDelta;
+ ASSERT(b_offset < 0); // Jumping back from deferred code.
+ *inline_end_address = address_after_nop + b_offset;
+
+ return code_marker;
+}
+
+
+bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
+ if (V8::UseCrankshaft()) return false;
+
+ // Find the end of the inlined code for handling the load if this is an
+ // inlined IC call site.
+ Address inline_end_address = 0;
+ if (InlinedICSiteMarker(address, &inline_end_address)
+ != Assembler::PROPERTY_ACCESS_INLINED) {
+ return false;
+ }
+
+ // Patch the offset of the property load instruction (ldr r0, [r1, #+XXX]).
+ // The immediate must be representable in 12 bits.
+ ASSERT((JSObject::kMaxInstanceSize - JSObject::kHeaderSize) < (1 << 12));
+ Address ldr_property_instr_address =
+ inline_end_address - Assembler::kInstrSize;
+ ASSERT(Assembler::IsLdrRegisterImmediate(
+ Assembler::instr_at(ldr_property_instr_address)));
+ Instr ldr_property_instr = Assembler::instr_at(ldr_property_instr_address);
+ ldr_property_instr = Assembler::SetLdrRegisterImmediateOffset(
+ ldr_property_instr, offset - kHeapObjectTag);
+ Assembler::instr_at_put(ldr_property_instr_address, ldr_property_instr);
+
+ // Indicate that code has changed.
+ CPU::FlushICache(ldr_property_instr_address, 1 * Assembler::kInstrSize);
+
+ // Patch the map check.
+ // For PROPERTY_ACCESS_INLINED, the load map instruction is generated
+ // 4 instructions before the end of the inlined code.
+  // See codegen-arm.cc CodeGenerator::EmitNamedLoad.
+ int ldr_map_offset = -4;
+ Address ldr_map_instr_address =
+ inline_end_address + ldr_map_offset * Assembler::kInstrSize;
+ Assembler::set_target_address_at(ldr_map_instr_address,
+ reinterpret_cast<Address>(map));
+ return true;
+}
+
+
+bool LoadIC::PatchInlinedContextualLoad(Address address,
+ Object* map,
+ Object* cell,
+ bool is_dont_delete) {
+ // Find the end of the inlined code for handling the contextual load if
+ // this is inlined IC call site.
+ Address inline_end_address = 0;
+ int marker = InlinedICSiteMarker(address, &inline_end_address);
+ if (!((marker == Assembler::PROPERTY_ACCESS_INLINED_CONTEXT) ||
+ (marker == Assembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE))) {
+ return false;
+ }
+ // On ARM we don't rely on the is_dont_delete argument as the hint is already
+ // embedded in the code marker.
+ bool marker_is_dont_delete =
+ marker == Assembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE;
+
+ // These are the offsets from the end of the inlined code.
+  // See codegen-arm.cc CodeGenerator::EmitNamedLoad.
+ int ldr_map_offset = marker_is_dont_delete ? -5: -8;
+ int ldr_cell_offset = marker_is_dont_delete ? -2: -5;
+ if (FLAG_debug_code && marker_is_dont_delete) {
+ // Three extra instructions were generated to check for the_hole_value.
+ ldr_map_offset -= 3;
+ ldr_cell_offset -= 3;
+ }
+ Address ldr_map_instr_address =
+ inline_end_address + ldr_map_offset * Assembler::kInstrSize;
+ Address ldr_cell_instr_address =
+ inline_end_address + ldr_cell_offset * Assembler::kInstrSize;
+
+ // Patch the map check.
+ Assembler::set_target_address_at(ldr_map_instr_address,
+ reinterpret_cast<Address>(map));
+ // Patch the cell address.
+ Assembler::set_target_address_at(ldr_cell_instr_address,
+ reinterpret_cast<Address>(cell));
+
+ return true;
+}
+
+
+bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
+ if (V8::UseCrankshaft()) return false;
+
+ // Find the end of the inlined code for the store if there is an
+ // inlined version of the store.
+ Address inline_end_address = 0;
+ if (InlinedICSiteMarker(address, &inline_end_address)
+ != Assembler::PROPERTY_ACCESS_INLINED) {
+ return false;
+ }
+
+ // Compute the address of the map load instruction.
+ Address ldr_map_instr_address =
+ inline_end_address -
+ (CodeGenerator::GetInlinedNamedStoreInstructionsAfterPatch() *
+ Assembler::kInstrSize);
+
+ // Update the offsets if initializing the inlined store. No reason
+ // to update the offsets when clearing the inlined version because
+ // it will bail out in the map check.
+ if (map != HEAP->null_value()) {
+ // Patch the offset in the actual store instruction.
+ Address str_property_instr_address =
+ ldr_map_instr_address + 3 * Assembler::kInstrSize;
+ Instr str_property_instr = Assembler::instr_at(str_property_instr_address);
+ ASSERT(Assembler::IsStrRegisterImmediate(str_property_instr));
+ str_property_instr = Assembler::SetStrRegisterImmediateOffset(
+ str_property_instr, offset - kHeapObjectTag);
+ Assembler::instr_at_put(str_property_instr_address, str_property_instr);
+
+ // Patch the offset in the add instruction that is part of the
+ // write barrier.
+ Address add_offset_instr_address =
+ str_property_instr_address + Assembler::kInstrSize;
+ Instr add_offset_instr = Assembler::instr_at(add_offset_instr_address);
+ ASSERT(Assembler::IsAddRegisterImmediate(add_offset_instr));
+ add_offset_instr = Assembler::SetAddRegisterImmediateOffset(
+ add_offset_instr, offset - kHeapObjectTag);
+ Assembler::instr_at_put(add_offset_instr_address, add_offset_instr);
+
+ // Indicate that code has changed.
+ CPU::FlushICache(str_property_instr_address, 2 * Assembler::kInstrSize);
+ }
+
+ // Patch the map check.
+ Assembler::set_target_address_at(ldr_map_instr_address,
+ reinterpret_cast<Address>(map));
+
+ return true;
+}
+
+
+bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
+ if (V8::UseCrankshaft()) return false;
+
+ Address inline_end_address = 0;
+ if (InlinedICSiteMarker(address, &inline_end_address)
+ != Assembler::PROPERTY_ACCESS_INLINED) {
+ return false;
+ }
+
+ // Patch the map check.
+ Address ldr_map_instr_address =
+ inline_end_address -
+ (CodeGenerator::GetInlinedKeyedLoadInstructionsAfterPatch() *
+ Assembler::kInstrSize);
+ Assembler::set_target_address_at(ldr_map_instr_address,
+ reinterpret_cast<Address>(map));
+ return true;
+}
+
+
+bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
+ if (V8::UseCrankshaft()) return false;
+
+ // Find the end of the inlined code for handling the store if this is an
+ // inlined IC call site.
+ Address inline_end_address = 0;
+ if (InlinedICSiteMarker(address, &inline_end_address)
+ != Assembler::PROPERTY_ACCESS_INLINED) {
+ return false;
+ }
+
+ // Patch the map check.
+ Address ldr_map_instr_address =
+ inline_end_address -
+ (CodeGenerator::kInlinedKeyedStoreInstructionsAfterPatch *
+ Assembler::kInstrSize);
+ Assembler::set_target_address_at(ldr_map_instr_address,
+ reinterpret_cast<Address>(map));
+ return true;
+}
+
Object* KeyedLoadIC_Miss(Arguments args);
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index faf6404..3f1d15b 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -61,21 +61,22 @@
#ifdef DEBUG
void LInstruction::VerifyCall() {
- // Call instructions can use only fixed registers as temporaries and
- // outputs because all registers are blocked by the calling convention.
- // Inputs operands must use a fixed register or use-at-start policy or
- // a non-register policy.
+ // Call instructions can use only fixed registers as
+ // temporaries and outputs because all registers
+ // are blocked by the calling convention.
+ // Inputs must use a fixed register.
ASSERT(Output() == NULL ||
LUnallocated::cast(Output())->HasFixedPolicy() ||
!LUnallocated::cast(Output())->HasRegisterPolicy());
for (UseIterator it(this); it.HasNext(); it.Advance()) {
- LUnallocated* operand = LUnallocated::cast(it.Next());
- ASSERT(operand->HasFixedPolicy() ||
- operand->IsUsedAtStart());
+ LOperand* operand = it.Next();
+ ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
+ !LUnallocated::cast(operand)->HasRegisterPolicy());
}
for (TempIterator it(this); it.HasNext(); it.Advance()) {
- LUnallocated* operand = LUnallocated::cast(it.Next());
- ASSERT(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
+ LOperand* operand = it.Next();
+ ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
+ !LUnallocated::cast(operand)->HasRegisterPolicy());
}
}
#endif
@@ -300,13 +301,6 @@
}
-void LInvokeFunction::PrintDataTo(StringStream* stream) {
- stream->Add("= ");
- InputAt(0)->PrintTo(stream);
- stream->Add(" #%d / ", arity());
-}
-
-
void LCallKeyed::PrintDataTo(StringStream* stream) {
stream->Add("[r2] #%d / ", arity());
}
@@ -1218,14 +1212,6 @@
}
-LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
- LOperand* function = UseFixed(instr->function(), r1);
- argument_count_ -= instr->argument_count();
- LInvokeFunction* result = new LInvokeFunction(function);
- return MarkAsCall(DefineFixed(result, r0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
-}
-
-
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
BuiltinFunctionId op = instr->op();
if (op == kMathLog || op == kMathSin || op == kMathCos) {
@@ -1960,13 +1946,6 @@
}
-LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
- LOperand* left = UseRegisterAtStart(instr->left());
- LOperand* right = UseRegisterAtStart(instr->right());
- return MarkAsCall(DefineFixed(new LStringAdd(left, right), r0), instr);
-}
-
-
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
LOperand* string = UseRegister(instr->string());
LOperand* index = UseRegisterOrConstant(instr->index());
diff --git a/src/arm/lithium-arm.h b/src/arm/lithium-arm.h
index 4add6bf..6da7c86 100644
--- a/src/arm/lithium-arm.h
+++ b/src/arm/lithium-arm.h
@@ -106,7 +106,6 @@
V(InstanceOfAndBranch) \
V(InstanceOfKnownGlobal) \
V(Integer32ToDouble) \
- V(InvokeFunction) \
V(IsNull) \
V(IsNullAndBranch) \
V(IsObject) \
@@ -153,7 +152,6 @@
V(StoreKeyedSpecializedArrayElement) \
V(StoreNamedField) \
V(StoreNamedGeneric) \
- V(StringAdd) \
V(StringCharCodeAt) \
V(StringCharFromCode) \
V(StringLength) \
@@ -1414,23 +1412,6 @@
};
-class LInvokeFunction: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LInvokeFunction(LOperand* function) {
- inputs_[0] = function;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke-function")
- DECLARE_HYDROGEN_ACCESSOR(InvokeFunction)
-
- LOperand* function() { return inputs_[0]; }
-
- virtual void PrintDataTo(StringStream* stream);
-
- int arity() const { return hydrogen()->argument_count() - 1; }
-};
-
-
class LCallKeyed: public LTemplateInstruction<1, 1, 0> {
public:
explicit LCallKeyed(LOperand* key) {
@@ -1725,22 +1706,6 @@
};
-class LStringAdd: public LTemplateInstruction<1, 2, 0> {
- public:
- LStringAdd(LOperand* left, LOperand* right) {
- inputs_[0] = left;
- inputs_[1] = right;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
- DECLARE_HYDROGEN_ACCESSOR(StringAdd)
-
- LOperand* left() { return inputs_[0]; }
- LOperand* right() { return inputs_[1]; }
-};
-
-
-
class LStringCharCodeAt: public LTemplateInstruction<1, 2, 0> {
public:
LStringCharCodeAt(LOperand* string, LOperand* index) {
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 2d415cb..4912449 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -91,7 +91,7 @@
void LCodeGen::FinishCode(Handle<Code> code) {
ASSERT(is_done());
- code->set_stack_slots(GetStackSlotCount());
+ code->set_stack_slots(StackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code);
Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
@@ -149,7 +149,7 @@
__ add(fp, sp, Operand(2 * kPointerSize)); // Adjust FP to point to saved FP.
// Reserve space for the stack slots needed by the code.
- int slots = GetStackSlotCount();
+ int slots = StackSlotCount();
if (slots > 0) {
if (FLAG_debug_code) {
__ mov(r0, Operand(slots));
@@ -263,7 +263,7 @@
bool LCodeGen::GenerateSafepointTable() {
ASSERT(is_done());
- safepoints_.Emit(masm(), GetStackSlotCount());
+ safepoints_.Emit(masm(), StackSlotCount());
return !is_aborted();
}
@@ -459,7 +459,7 @@
translation->StoreDoubleStackSlot(op->index());
} else if (op->IsArgument()) {
ASSERT(is_tagged);
- int src_index = GetStackSlotCount() + op->index();
+ int src_index = StackSlotCount() + op->index();
translation->StoreStackSlot(src_index);
} else if (op->IsRegister()) {
Register reg = ToRegister(op);
@@ -2180,7 +2180,7 @@
__ push(r0);
__ CallRuntime(Runtime::kTraceExit, 1);
}
- int32_t sp_delta = (GetParameterCount() + 1) * kPointerSize;
+ int32_t sp_delta = (ParameterCount() + 1) * kPointerSize;
__ mov(sp, fp);
__ ldm(ia_w, sp, fp.bit() | lr.bit());
__ add(sp, sp, Operand(sp_delta));
@@ -2861,49 +2861,9 @@
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Register result = ToRegister(instr->result());
- Register scratch1 = result;
- Register scratch2 = scratch0();
- Label done, check_sign_on_zero;
-
- // Extract exponent bits.
- __ vmov(scratch1, input.high());
- __ ubfx(scratch2,
- scratch1,
- HeapNumber::kExponentShift,
- HeapNumber::kExponentBits);
-
- // If the number is in ]-0.5, +0.5[, the result is +/- 0.
- __ cmp(scratch2, Operand(HeapNumber::kExponentBias - 2));
- __ mov(result, Operand(0), LeaveCC, le);
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- __ b(le, &check_sign_on_zero);
- } else {
- __ b(le, &done);
- }
-
- // The following conversion will not work with numbers
- // outside of ]-2^32, 2^32[.
- __ cmp(scratch2, Operand(HeapNumber::kExponentBias + 32));
- DeoptimizeIf(ge, instr->environment());
-
- // Save the original sign for later comparison.
- __ and_(scratch2, scratch1, Operand(HeapNumber::kSignMask));
-
- __ vmov(double_scratch0(), 0.5);
- __ vadd(input, input, double_scratch0());
-
- // Check sign of the result: if the sign changed, the input
- // value was in ]0.5, 0[ and the result should be -0.
- __ vmov(scratch1, input.high());
- __ eor(scratch1, scratch1, Operand(scratch2), SetCC);
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- DeoptimizeIf(mi, instr->environment());
- } else {
- __ mov(result, Operand(0), LeaveCC, mi);
- __ b(mi, &done);
- }
-
- __ EmitVFPTruncate(kRoundToMinusInf,
+ Register scratch1 = scratch0();
+ Register scratch2 = result;
+ __ EmitVFPTruncate(kRoundToNearest,
double_scratch0().low(),
input,
scratch1,
@@ -2913,14 +2873,14 @@
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
// Test for -0.
+ Label done;
__ cmp(result, Operand(0));
__ b(ne, &done);
- __ bind(&check_sign_on_zero);
__ vmov(scratch1, input.high());
__ tst(scratch1, Operand(HeapNumber::kSignMask));
DeoptimizeIf(ne, instr->environment());
+ __ bind(&done);
}
- __ bind(&done);
}
@@ -3065,21 +3025,6 @@
}
-void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
- ASSERT(ToRegister(instr->function()).is(r1));
- ASSERT(instr->HasPointerMap());
- ASSERT(instr->HasDeoptimizationEnvironment());
- LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
- RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator generator(this, pointers, env->deoptimization_index());
- ParameterCount count(instr->arity());
- __ InvokeFunction(r1, count, CALL_FUNCTION, &generator);
- __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-}
-
-
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
ASSERT(ToRegister(instr->result()).is(r0));
@@ -3278,14 +3223,6 @@
}
-void LCodeGen::DoStringAdd(LStringAdd* instr) {
- __ push(ToRegister(instr->left()));
- __ push(ToRegister(instr->right()));
- StringAddStub stub(NO_STRING_CHECK_IN_STUB);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-}
-
-
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
class DeferredStringCharCodeAt: public LDeferredCode {
public:
diff --git a/src/arm/lithium-codegen-arm.h b/src/arm/lithium-codegen-arm.h
index 1110ea6..8a4ea27 100644
--- a/src/arm/lithium-codegen-arm.h
+++ b/src/arm/lithium-codegen-arm.h
@@ -158,8 +158,8 @@
Register temporary,
Register temporary2);
- int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- int GetParameterCount() const { return scope()->num_parameters(); }
+ int StackSlotCount() const { return chunk()->spill_slot_count(); }
+ int ParameterCount() const { return scope()->num_parameters(); }
void Abort(const char* format, ...);
void Comment(const char* format, ...);
diff --git a/src/arm/regexp-macro-assembler-arm.cc b/src/arm/regexp-macro-assembler-arm.cc
index 4bd8c80..1c59823 100644
--- a/src/arm/regexp-macro-assembler-arm.cc
+++ b/src/arm/regexp-macro-assembler-arm.cc
@@ -605,7 +605,7 @@
}
-Handle<Object> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
+Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
// Finalize code - write the entry point code now we know how many
// registers we need.
@@ -813,7 +813,7 @@
Code::ComputeFlags(Code::REGEXP),
masm_->CodeObject());
PROFILE(Isolate::Current(), RegExpCodeCreateEvent(*code, *source));
- return Handle<Object>::cast(code);
+ return Handle<HeapObject>::cast(code);
}
diff --git a/src/arm/regexp-macro-assembler-arm.h b/src/arm/regexp-macro-assembler-arm.h
index b57d0eb..d771e40 100644
--- a/src/arm/regexp-macro-assembler-arm.h
+++ b/src/arm/regexp-macro-assembler-arm.h
@@ -82,7 +82,7 @@
virtual bool CheckSpecialCharacterClass(uc16 type,
Label* on_no_match);
virtual void Fail();
- virtual Handle<Object> GetCode(Handle<String> source);
+ virtual Handle<HeapObject> GetCode(Handle<String> source);
virtual void GoTo(Label* label);
virtual void IfRegisterGE(int reg, int comparand, Label* if_ge);
virtual void IfRegisterLT(int reg, int comparand, Label* if_lt);
diff --git a/src/assembler.cc b/src/assembler.cc
index ca30e19..bfecc77 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -74,6 +74,18 @@
const char* RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
// -----------------------------------------------------------------------------
+// Implementation of AssemblerBase
+
+AssemblerBase::AssemblerBase(Isolate* isolate)
+ : isolate_(isolate),
+ jit_cookie_(0) {
+ if (FLAG_mask_constants_with_cookie && isolate != NULL) {
+ jit_cookie_ = V8::RandomPrivate(isolate);
+ }
+}
+
+
+// -----------------------------------------------------------------------------
// Implementation of Label
int Label::pos() const {
diff --git a/src/assembler.h b/src/assembler.h
index e8cecc3..395bbd5 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -48,12 +48,14 @@
class AssemblerBase: public Malloced {
public:
- explicit AssemblerBase(Isolate* isolate) : isolate_(isolate) {}
+ explicit AssemblerBase(Isolate* isolate);
Isolate* isolate() const { return isolate_; }
+ int jit_cookie() { return jit_cookie_; }
private:
Isolate* isolate_;
+ int jit_cookie_;
};
// -----------------------------------------------------------------------------
diff --git a/src/ast.cc b/src/ast.cc
index 303189d..7ae0f34 100644
--- a/src/ast.cc
+++ b/src/ast.cc
@@ -413,7 +413,8 @@
bool Throw::IsInlineable() const {
- return true;
+ // TODO(1143): Make functions containing throw inlineable.
+ return false;
}
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 0800714..a30ffc0 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -141,8 +141,7 @@
class Genesis BASE_EMBEDDED {
public:
- Genesis(Isolate* isolate,
- Handle<Object> global_object,
+ Genesis(Handle<Object> global_object,
v8::Handle<v8::ObjectTemplate> global_template,
v8::ExtensionConfiguration* extensions);
~Genesis() { }
@@ -151,13 +150,8 @@
Genesis* previous() { return previous_; }
- Isolate* isolate() const { return isolate_; }
- Factory* factory() const { return isolate_->factory(); }
- Heap* heap() const { return isolate_->heap(); }
-
private:
Handle<Context> global_context_;
- Isolate* isolate_;
// There may be more than one active genesis object: When GC is
// triggered during environment creation there may be weak handle
@@ -169,7 +163,7 @@
// Creates some basic objects. Used for creating a context from scratch.
void CreateRoots();
// Creates the empty function. Used for creating a context from scratch.
- Handle<JSFunction> CreateEmptyFunction(Isolate* isolate);
+ Handle<JSFunction> CreateEmptyFunction();
// Creates the ThrowTypeError function. ECMA 5th Ed. 13.2.3
Handle<JSFunction> CreateThrowTypeErrorFunction(Builtins::Name builtin);
@@ -200,7 +194,6 @@
// Used for creating a context from scratch.
void InstallNativeFunctions();
bool InstallNatives();
- bool InstallExperimentalNatives();
void InstallBuiltinFunctionIds();
void InstallJSFunctionResultCaches();
void InitializeNormalizedMapCaches();
@@ -246,8 +239,7 @@
Handle<FixedArray> arguments,
Handle<FixedArray> caller);
- static bool CompileBuiltin(Isolate* isolate, int index);
- static bool CompileExperimentalBuiltin(Isolate* isolate, int index);
+ static bool CompileBuiltin(int index);
static bool CompileNative(Vector<const char> name, Handle<String> source);
static bool CompileScriptCached(Vector<const char> name,
Handle<String> source,
@@ -277,13 +269,12 @@
Handle<Context> Bootstrapper::CreateEnvironment(
- Isolate* isolate,
Handle<Object> global_object,
v8::Handle<v8::ObjectTemplate> global_template,
v8::ExtensionConfiguration* extensions) {
HandleScope scope;
Handle<Context> env;
- Genesis genesis(isolate, global_object, global_template, extensions);
+ Genesis genesis(global_object, global_template, extensions);
env = genesis.result();
if (!env.is_null()) {
if (InstallExtensions(env, extensions)) {
@@ -296,16 +287,15 @@
static void SetObjectPrototype(Handle<JSObject> object, Handle<Object> proto) {
// object.__proto__ = proto;
- Factory* factory = object->GetIsolate()->factory();
Handle<Map> old_to_map = Handle<Map>(object->map());
- Handle<Map> new_to_map = factory->CopyMapDropTransitions(old_to_map);
+ Handle<Map> new_to_map = FACTORY->CopyMapDropTransitions(old_to_map);
new_to_map->set_prototype(*proto);
object->set_map(*new_to_map);
}
void Bootstrapper::DetachGlobal(Handle<Context> env) {
- Factory* factory = env->GetIsolate()->factory();
+ Factory* factory = Isolate::Current()->factory();
JSGlobalProxy::cast(env->global_proxy())->set_context(*factory->null_value());
SetObjectPrototype(Handle<JSObject>(env->global_proxy()),
factory->null_value());
@@ -332,7 +322,7 @@
Handle<JSObject> prototype,
Builtins::Name call,
bool is_ecma_native) {
- Isolate* isolate = target->GetIsolate();
+ Isolate* isolate = Isolate::Current();
Factory* factory = isolate->factory();
Handle<String> symbol = factory->LookupAsciiSymbol(name);
Handle<Code> call_code = Handle<Code>(isolate->builtins()->builtin(call));
@@ -354,31 +344,30 @@
Handle<DescriptorArray> Genesis::ComputeFunctionInstanceDescriptor(
PrototypePropertyMode prototypeMode) {
+ Factory* factory = Isolate::Current()->factory();
Handle<DescriptorArray> descriptors =
- factory()->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE
- ? 4
- : 5);
+ factory->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE ? 4 : 5);
PropertyAttributes attributes =
static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
{ // Add length.
- Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionLength);
- CallbacksDescriptor d(*factory()->length_symbol(), *proxy, attributes);
+ Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionLength);
+ CallbacksDescriptor d(*factory->length_symbol(), *proxy, attributes);
descriptors->Set(0, &d);
}
{ // Add name.
- Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionName);
- CallbacksDescriptor d(*factory()->name_symbol(), *proxy, attributes);
+ Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionName);
+ CallbacksDescriptor d(*factory->name_symbol(), *proxy, attributes);
descriptors->Set(1, &d);
}
{ // Add arguments.
- Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionArguments);
- CallbacksDescriptor d(*factory()->arguments_symbol(), *proxy, attributes);
+ Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionArguments);
+ CallbacksDescriptor d(*factory->arguments_symbol(), *proxy, attributes);
descriptors->Set(2, &d);
}
{ // Add caller.
- Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionCaller);
- CallbacksDescriptor d(*factory()->caller_symbol(), *proxy, attributes);
+ Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionCaller);
+ CallbacksDescriptor d(*factory->caller_symbol(), *proxy, attributes);
descriptors->Set(3, &d);
}
if (prototypeMode != DONT_ADD_PROTOTYPE) {
@@ -386,8 +375,8 @@
if (prototypeMode == ADD_WRITEABLE_PROTOTYPE) {
attributes = static_cast<PropertyAttributes>(attributes & ~READ_ONLY);
}
- Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionPrototype);
- CallbacksDescriptor d(*factory()->prototype_symbol(), *proxy, attributes);
+ Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionPrototype);
+ CallbacksDescriptor d(*factory->prototype_symbol(), *proxy, attributes);
descriptors->Set(4, &d);
}
descriptors->Sort();
@@ -396,7 +385,7 @@
Handle<Map> Genesis::CreateFunctionMap(PrototypePropertyMode prototype_mode) {
- Handle<Map> map = factory()->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
+ Handle<Map> map = FACTORY->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
Handle<DescriptorArray> descriptors =
ComputeFunctionInstanceDescriptor(prototype_mode);
map->set_instance_descriptors(*descriptors);
@@ -405,7 +394,7 @@
}
-Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) {
+Handle<JSFunction> Genesis::CreateEmptyFunction() {
// Allocate the map for function instances. Maps are allocated first and their
// prototypes patched later, once empty function is created.
@@ -433,6 +422,7 @@
function_instance_map_writable_prototype_ =
CreateFunctionMap(ADD_WRITEABLE_PROTOTYPE);
+ Isolate* isolate = Isolate::Current();
Factory* factory = isolate->factory();
Heap* heap = isolate->heap();
@@ -501,31 +491,28 @@
PrototypePropertyMode prototypeMode,
Handle<FixedArray> arguments,
Handle<FixedArray> caller) {
+ Factory* factory = Isolate::Current()->factory();
Handle<DescriptorArray> descriptors =
- factory()->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE
- ? 4
- : 5);
+ factory->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE ? 4 : 5);
PropertyAttributes attributes = static_cast<PropertyAttributes>(
DONT_ENUM | DONT_DELETE | READ_ONLY);
{ // length
- Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionLength);
- CallbacksDescriptor d(*factory()->length_symbol(), *proxy, attributes);
+ Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionLength);
+ CallbacksDescriptor d(*factory->length_symbol(), *proxy, attributes);
descriptors->Set(0, &d);
}
{ // name
- Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionName);
- CallbacksDescriptor d(*factory()->name_symbol(), *proxy, attributes);
+ Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionName);
+ CallbacksDescriptor d(*factory->name_symbol(), *proxy, attributes);
descriptors->Set(1, &d);
}
{ // arguments
- CallbacksDescriptor d(*factory()->arguments_symbol(),
- *arguments,
- attributes);
+ CallbacksDescriptor d(*factory->arguments_symbol(), *arguments, attributes);
descriptors->Set(2, &d);
}
{ // caller
- CallbacksDescriptor d(*factory()->caller_symbol(), *caller, attributes);
+ CallbacksDescriptor d(*factory->caller_symbol(), *caller, attributes);
descriptors->Set(3, &d);
}
@@ -534,8 +521,8 @@
if (prototypeMode == ADD_WRITEABLE_PROTOTYPE) {
attributes = static_cast<PropertyAttributes>(attributes & ~READ_ONLY);
}
- Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionPrototype);
- CallbacksDescriptor d(*factory()->prototype_symbol(), *proxy, attributes);
+ Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionPrototype);
+ CallbacksDescriptor d(*factory->prototype_symbol(), *proxy, attributes);
descriptors->Set(4, &d);
}
@@ -547,11 +534,14 @@
// ECMAScript 5th Edition, 13.2.3
Handle<JSFunction> Genesis::CreateThrowTypeErrorFunction(
Builtins::Name builtin) {
- Handle<String> name = factory()->LookupAsciiSymbol("ThrowTypeError");
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+
+ Handle<String> name = factory->LookupAsciiSymbol("ThrowTypeError");
Handle<JSFunction> throw_type_error =
- factory()->NewFunctionWithoutPrototype(name, kStrictMode);
+ factory->NewFunctionWithoutPrototype(name, kStrictMode);
Handle<Code> code = Handle<Code>(
- isolate()->builtins()->builtin(builtin));
+ isolate->builtins()->builtin(builtin));
throw_type_error->set_map(global_context()->strict_mode_function_map());
throw_type_error->set_code(*code);
@@ -569,7 +559,7 @@
Handle<JSFunction> empty_function,
Handle<FixedArray> arguments_callbacks,
Handle<FixedArray> caller_callbacks) {
- Handle<Map> map = factory()->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
+ Handle<Map> map = FACTORY->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
Handle<DescriptorArray> descriptors =
ComputeStrictFunctionInstanceDescriptor(prototype_mode,
arguments_callbacks,
@@ -584,7 +574,7 @@
void Genesis::CreateStrictModeFunctionMaps(Handle<JSFunction> empty) {
// Create the callbacks arrays for ThrowTypeError functions.
// The get/set callacks are filled in after the maps are created below.
- Factory* factory = empty->GetIsolate()->factory();
+ Factory* factory = Isolate::Current()->factory();
Handle<FixedArray> arguments = factory->NewFixedArray(2, TENURED);
Handle<FixedArray> caller = factory->NewFixedArray(2, TENURED);
@@ -633,7 +623,7 @@
static void AddToWeakGlobalContextList(Context* context) {
ASSERT(context->IsGlobalContext());
- Heap* heap = context->GetIsolate()->heap();
+ Heap* heap = Isolate::Current()->heap();
#ifdef DEBUG
{ // NOLINT
ASSERT(context->get(Context::NEXT_CONTEXT_LINK)->IsUndefined());
@@ -651,14 +641,15 @@
void Genesis::CreateRoots() {
+ Isolate* isolate = Isolate::Current();
// Allocate the global context FixedArray first and then patch the
// closure and extension object later (we need the empty function
// and the global object, but in order to create those, we need the
// global context).
- global_context_ = Handle<Context>::cast(isolate()->global_handles()->Create(
- *factory()->NewGlobalContext()));
+ global_context_ = Handle<Context>::cast(isolate->global_handles()->Create(
+ *isolate->factory()->NewGlobalContext()));
AddToWeakGlobalContextList(*global_context_);
- isolate()->set_context(*global_context());
+ isolate->set_context(*global_context());
// Allocate the message listeners object.
{
@@ -701,13 +692,17 @@
}
}
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+ Heap* heap = isolate->heap();
+
if (js_global_template.is_null()) {
- Handle<String> name = Handle<String>(heap()->empty_symbol());
- Handle<Code> code = Handle<Code>(isolate()->builtins()->builtin(
+ Handle<String> name = Handle<String>(heap->empty_symbol());
+ Handle<Code> code = Handle<Code>(isolate->builtins()->builtin(
Builtins::kIllegal));
js_global_function =
- factory()->NewFunction(name, JS_GLOBAL_OBJECT_TYPE,
- JSGlobalObject::kSize, code, true);
+ factory->NewFunction(name, JS_GLOBAL_OBJECT_TYPE,
+ JSGlobalObject::kSize, code, true);
// Change the constructor property of the prototype of the
// hidden global function to refer to the Object function.
Handle<JSObject> prototype =
@@ -715,20 +710,20 @@
JSObject::cast(js_global_function->instance_prototype()));
SetLocalPropertyNoThrow(
prototype,
- factory()->constructor_symbol(),
- isolate()->object_function(),
+ factory->constructor_symbol(),
+ isolate->object_function(),
NONE);
} else {
Handle<FunctionTemplateInfo> js_global_constructor(
FunctionTemplateInfo::cast(js_global_template->constructor()));
js_global_function =
- factory()->CreateApiFunction(js_global_constructor,
- factory()->InnerGlobalObject);
+ factory->CreateApiFunction(js_global_constructor,
+ factory->InnerGlobalObject);
}
js_global_function->initial_map()->set_is_hidden_prototype();
Handle<GlobalObject> inner_global =
- factory()->NewGlobalObject(js_global_function);
+ factory->NewGlobalObject(js_global_function);
if (inner_global_out != NULL) {
*inner_global_out = inner_global;
}
@@ -736,23 +731,23 @@
// Step 2: create or re-initialize the global proxy object.
Handle<JSFunction> global_proxy_function;
if (global_template.IsEmpty()) {
- Handle<String> name = Handle<String>(heap()->empty_symbol());
- Handle<Code> code = Handle<Code>(isolate()->builtins()->builtin(
+ Handle<String> name = Handle<String>(heap->empty_symbol());
+ Handle<Code> code = Handle<Code>(isolate->builtins()->builtin(
Builtins::kIllegal));
global_proxy_function =
- factory()->NewFunction(name, JS_GLOBAL_PROXY_TYPE,
- JSGlobalProxy::kSize, code, true);
+ factory->NewFunction(name, JS_GLOBAL_PROXY_TYPE,
+ JSGlobalProxy::kSize, code, true);
} else {
Handle<ObjectTemplateInfo> data =
v8::Utils::OpenHandle(*global_template);
Handle<FunctionTemplateInfo> global_constructor(
FunctionTemplateInfo::cast(data->constructor()));
global_proxy_function =
- factory()->CreateApiFunction(global_constructor,
- factory()->OuterGlobalObject);
+ factory->CreateApiFunction(global_constructor,
+ factory->OuterGlobalObject);
}
- Handle<String> global_name = factory()->LookupAsciiSymbol("global");
+ Handle<String> global_name = factory->LookupAsciiSymbol("global");
global_proxy_function->shared()->set_instance_class_name(*global_name);
global_proxy_function->initial_map()->set_is_access_check_needed(true);
@@ -766,7 +761,7 @@
Handle<JSGlobalProxy>::cast(global_object));
} else {
return Handle<JSGlobalProxy>::cast(
- factory()->NewJSObject(global_proxy_function, TENURED));
+ factory->NewJSObject(global_proxy_function, TENURED));
}
}
@@ -791,7 +786,7 @@
static const PropertyAttributes attributes =
static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
ForceSetProperty(builtins_global,
- factory()->LookupAsciiSymbol("global"),
+ FACTORY->LookupAsciiSymbol("global"),
inner_global,
attributes);
// Setup the reference from the global object to the builtins object.
@@ -819,7 +814,7 @@
// object reinitialization.
global_context()->set_security_token(*inner_global);
- Isolate* isolate = inner_global->GetIsolate();
+ Isolate* isolate = Isolate::Current();
Factory* factory = isolate->factory();
Heap* heap = isolate->heap();
@@ -1169,26 +1164,17 @@
}
-bool Genesis::CompileBuiltin(Isolate* isolate, int index) {
+bool Genesis::CompileBuiltin(int index) {
Vector<const char> name = Natives::GetScriptName(index);
Handle<String> source_code =
- isolate->bootstrapper()->NativesSourceLookup(index);
- return CompileNative(name, source_code);
-}
-
-
-bool Genesis::CompileExperimentalBuiltin(Isolate* isolate, int index) {
- Vector<const char> name = ExperimentalNatives::GetScriptName(index);
- Factory* factory = isolate->factory();
- Handle<String> source_code =
- factory->NewStringFromAscii(ExperimentalNatives::GetScriptSource(index));
+ Isolate::Current()->bootstrapper()->NativesSourceLookup(index);
return CompileNative(name, source_code);
}
bool Genesis::CompileNative(Vector<const char> name, Handle<String> source) {
HandleScope scope;
- Isolate* isolate = source->GetIsolate();
+ Isolate* isolate = Isolate::Current();
#ifdef ENABLE_DEBUGGER_SUPPORT
isolate->debugger()->set_compiling_natives(true);
#endif
@@ -1213,7 +1199,7 @@
v8::Extension* extension,
Handle<Context> top_context,
bool use_runtime_context) {
- Factory* factory = source->GetIsolate()->factory();
+ Factory* factory = Isolate::Current()->factory();
HandleScope scope;
Handle<SharedFunctionInfo> function_info;
@@ -1260,15 +1246,15 @@
}
-#define INSTALL_NATIVE(Type, name, var) \
- Handle<String> var##_name = factory()->LookupAsciiSymbol(name); \
- Object* var##_native = \
- global_context()->builtins()->GetPropertyNoExceptionThrown( \
- *var##_name); \
+#define INSTALL_NATIVE(Type, name, var) \
+ Handle<String> var##_name = factory->LookupAsciiSymbol(name); \
+ Object* var##_native = \
+ global_context()->builtins()->GetPropertyNoExceptionThrown(*var##_name); \
global_context()->set_##var(Type::cast(var##_native));
void Genesis::InstallNativeFunctions() {
+ Factory* factory = Isolate::Current()->factory();
HandleScope scope;
INSTALL_NATIVE(JSFunction, "CreateDate", create_date_fun);
INSTALL_NATIVE(JSFunction, "ToNumber", to_number_fun);
@@ -1291,23 +1277,25 @@
bool Genesis::InstallNatives() {
HandleScope scope;
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+ Heap* heap = isolate->heap();
// Create a function for the builtins object. Allocate space for the
// JavaScript builtins, a reference to the builtins object
// (itself) and a reference to the global_context directly in the object.
Handle<Code> code = Handle<Code>(
- isolate()->builtins()->builtin(Builtins::kIllegal));
+ isolate->builtins()->builtin(Builtins::kIllegal));
Handle<JSFunction> builtins_fun =
- factory()->NewFunction(factory()->empty_symbol(),
- JS_BUILTINS_OBJECT_TYPE,
- JSBuiltinsObject::kSize, code, true);
+ factory->NewFunction(factory->empty_symbol(), JS_BUILTINS_OBJECT_TYPE,
+ JSBuiltinsObject::kSize, code, true);
- Handle<String> name = factory()->LookupAsciiSymbol("builtins");
+ Handle<String> name = factory->LookupAsciiSymbol("builtins");
builtins_fun->shared()->set_instance_class_name(*name);
// Allocate the builtins object.
Handle<JSBuiltinsObject> builtins =
- Handle<JSBuiltinsObject>::cast(factory()->NewGlobalObject(builtins_fun));
+ Handle<JSBuiltinsObject>::cast(factory->NewGlobalObject(builtins_fun));
builtins->set_builtins(*builtins);
builtins->set_global_context(*global_context());
builtins->set_global_receiver(*builtins);
@@ -1318,7 +1306,7 @@
// global object.
static const PropertyAttributes attributes =
static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
- Handle<String> global_symbol = factory()->LookupAsciiSymbol("global");
+ Handle<String> global_symbol = factory->LookupAsciiSymbol("global");
Handle<Object> global_obj(global_context()->global());
SetLocalPropertyNoThrow(builtins, global_symbol, global_obj, attributes);
@@ -1327,13 +1315,12 @@
// Create a bridge function that has context in the global context.
Handle<JSFunction> bridge =
- factory()->NewFunction(factory()->empty_symbol(),
- factory()->undefined_value());
- ASSERT(bridge->context() == *isolate()->global_context());
+ factory->NewFunction(factory->empty_symbol(), factory->undefined_value());
+ ASSERT(bridge->context() == *isolate->global_context());
// Allocate the builtins context.
Handle<Context> context =
- factory()->NewFunctionContext(Context::MIN_CONTEXT_SLOTS, bridge);
+ factory->NewFunctionContext(Context::MIN_CONTEXT_SLOTS, bridge);
context->set_global(*builtins); // override builtins global object
global_context()->set_runtime_context(*context);
@@ -1342,113 +1329,113 @@
// Builtin functions for Script.
Handle<JSFunction> script_fun =
InstallFunction(builtins, "Script", JS_VALUE_TYPE, JSValue::kSize,
- isolate()->initial_object_prototype(),
+ isolate->initial_object_prototype(),
Builtins::kIllegal, false);
Handle<JSObject> prototype =
- factory()->NewJSObject(isolate()->object_function(), TENURED);
+ factory->NewJSObject(isolate->object_function(), TENURED);
SetPrototype(script_fun, prototype);
global_context()->set_script_function(*script_fun);
// Add 'source' and 'data' property to scripts.
PropertyAttributes common_attributes =
static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
- Handle<Proxy> proxy_source = factory()->NewProxy(&Accessors::ScriptSource);
+ Handle<Proxy> proxy_source = factory->NewProxy(&Accessors::ScriptSource);
Handle<DescriptorArray> script_descriptors =
- factory()->CopyAppendProxyDescriptor(
- factory()->empty_descriptor_array(),
- factory()->LookupAsciiSymbol("source"),
+ factory->CopyAppendProxyDescriptor(
+ factory->empty_descriptor_array(),
+ factory->LookupAsciiSymbol("source"),
proxy_source,
common_attributes);
- Handle<Proxy> proxy_name = factory()->NewProxy(&Accessors::ScriptName);
+ Handle<Proxy> proxy_name = factory->NewProxy(&Accessors::ScriptName);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("name"),
+ factory->LookupAsciiSymbol("name"),
proxy_name,
common_attributes);
- Handle<Proxy> proxy_id = factory()->NewProxy(&Accessors::ScriptId);
+ Handle<Proxy> proxy_id = factory->NewProxy(&Accessors::ScriptId);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("id"),
+ factory->LookupAsciiSymbol("id"),
proxy_id,
common_attributes);
Handle<Proxy> proxy_line_offset =
- factory()->NewProxy(&Accessors::ScriptLineOffset);
+ factory->NewProxy(&Accessors::ScriptLineOffset);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("line_offset"),
+ factory->LookupAsciiSymbol("line_offset"),
proxy_line_offset,
common_attributes);
Handle<Proxy> proxy_column_offset =
- factory()->NewProxy(&Accessors::ScriptColumnOffset);
+ factory->NewProxy(&Accessors::ScriptColumnOffset);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("column_offset"),
+ factory->LookupAsciiSymbol("column_offset"),
proxy_column_offset,
common_attributes);
- Handle<Proxy> proxy_data = factory()->NewProxy(&Accessors::ScriptData);
+ Handle<Proxy> proxy_data = factory->NewProxy(&Accessors::ScriptData);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("data"),
+ factory->LookupAsciiSymbol("data"),
proxy_data,
common_attributes);
- Handle<Proxy> proxy_type = factory()->NewProxy(&Accessors::ScriptType);
+ Handle<Proxy> proxy_type = factory->NewProxy(&Accessors::ScriptType);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("type"),
+ factory->LookupAsciiSymbol("type"),
proxy_type,
common_attributes);
Handle<Proxy> proxy_compilation_type =
- factory()->NewProxy(&Accessors::ScriptCompilationType);
+ factory->NewProxy(&Accessors::ScriptCompilationType);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("compilation_type"),
+ factory->LookupAsciiSymbol("compilation_type"),
proxy_compilation_type,
common_attributes);
Handle<Proxy> proxy_line_ends =
- factory()->NewProxy(&Accessors::ScriptLineEnds);
+ factory->NewProxy(&Accessors::ScriptLineEnds);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("line_ends"),
+ factory->LookupAsciiSymbol("line_ends"),
proxy_line_ends,
common_attributes);
Handle<Proxy> proxy_context_data =
- factory()->NewProxy(&Accessors::ScriptContextData);
+ factory->NewProxy(&Accessors::ScriptContextData);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("context_data"),
+ factory->LookupAsciiSymbol("context_data"),
proxy_context_data,
common_attributes);
Handle<Proxy> proxy_eval_from_script =
- factory()->NewProxy(&Accessors::ScriptEvalFromScript);
+ factory->NewProxy(&Accessors::ScriptEvalFromScript);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("eval_from_script"),
+ factory->LookupAsciiSymbol("eval_from_script"),
proxy_eval_from_script,
common_attributes);
Handle<Proxy> proxy_eval_from_script_position =
- factory()->NewProxy(&Accessors::ScriptEvalFromScriptPosition);
+ factory->NewProxy(&Accessors::ScriptEvalFromScriptPosition);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("eval_from_script_position"),
+ factory->LookupAsciiSymbol("eval_from_script_position"),
proxy_eval_from_script_position,
common_attributes);
Handle<Proxy> proxy_eval_from_function_name =
- factory()->NewProxy(&Accessors::ScriptEvalFromFunctionName);
+ factory->NewProxy(&Accessors::ScriptEvalFromFunctionName);
script_descriptors =
- factory()->CopyAppendProxyDescriptor(
+ factory->CopyAppendProxyDescriptor(
script_descriptors,
- factory()->LookupAsciiSymbol("eval_from_function_name"),
+ factory->LookupAsciiSymbol("eval_from_function_name"),
proxy_eval_from_function_name,
common_attributes);
@@ -1456,9 +1443,9 @@
script_map->set_instance_descriptors(*script_descriptors);
// Allocate the empty script.
- Handle<Script> script = factory()->NewScript(factory()->empty_string());
+ Handle<Script> script = factory->NewScript(factory->empty_string());
script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
- heap()->public_set_empty_script(*script);
+ heap->public_set_empty_script(*script);
}
{
// Builtin function for OpaqueReference -- a JSValue-based object,
@@ -1467,10 +1454,10 @@
Handle<JSFunction> opaque_reference_fun =
InstallFunction(builtins, "OpaqueReference", JS_VALUE_TYPE,
JSValue::kSize,
- isolate()->initial_object_prototype(),
+ isolate->initial_object_prototype(),
Builtins::kIllegal, false);
Handle<JSObject> prototype =
- factory()->NewJSObject(isolate()->object_function(), TENURED);
+ factory->NewJSObject(isolate->object_function(), TENURED);
SetPrototype(opaque_reference_fun, prototype);
global_context()->set_opaque_reference_function(*opaque_reference_fun);
}
@@ -1489,23 +1476,23 @@
"InternalArray",
JS_ARRAY_TYPE,
JSArray::kSize,
- isolate()->initial_object_prototype(),
+ isolate->initial_object_prototype(),
Builtins::kArrayCode,
true);
Handle<JSObject> prototype =
- factory()->NewJSObject(isolate()->object_function(), TENURED);
+ factory->NewJSObject(isolate->object_function(), TENURED);
SetPrototype(array_function, prototype);
array_function->shared()->set_construct_stub(
- isolate()->builtins()->builtin(Builtins::kArrayConstructCode));
+ isolate->builtins()->builtin(Builtins::kArrayConstructCode));
array_function->shared()->DontAdaptArguments();
// Make "length" magic on instances.
Handle<DescriptorArray> array_descriptors =
- factory()->CopyAppendProxyDescriptor(
- factory()->empty_descriptor_array(),
- factory()->length_symbol(),
- factory()->NewProxy(&Accessors::ArrayLength),
+ factory->CopyAppendProxyDescriptor(
+ factory->empty_descriptor_array(),
+ factory->length_symbol(),
+ factory->NewProxy(&Accessors::ArrayLength),
static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE));
array_function->initial_map()->set_instance_descriptors(
@@ -1521,7 +1508,8 @@
for (int i = Natives::GetDebuggerCount();
i < Natives::GetBuiltinsCount();
i++) {
- if (!CompileBuiltin(isolate(), i)) return false;
+ Vector<const char> name = Natives::GetScriptName(i);
+ if (!CompileBuiltin(i)) return false;
// TODO(ager): We really only need to install the JS builtin
// functions on the builtins object after compiling and running
// runtime.js.
@@ -1541,9 +1529,9 @@
InstallBuiltinFunctionIds();
// Install Function.prototype.call and apply.
- { Handle<String> key = factory()->function_class_symbol();
+ { Handle<String> key = factory->function_class_symbol();
Handle<JSFunction> function =
- Handle<JSFunction>::cast(GetProperty(isolate()->global(), key));
+ Handle<JSFunction>::cast(GetProperty(isolate->global(), key));
Handle<JSObject> proto =
Handle<JSObject>(JSObject::cast(function->instance_prototype()));
@@ -1585,7 +1573,7 @@
// Add initial map.
Handle<Map> initial_map =
- factory()->NewMap(JS_ARRAY_TYPE, JSRegExpResult::kSize);
+ factory->NewMap(JS_ARRAY_TYPE, JSRegExpResult::kSize);
initial_map->set_constructor(*array_constructor);
// Set prototype on map.
@@ -1599,13 +1587,13 @@
ASSERT_EQ(1, array_descriptors->number_of_descriptors());
Handle<DescriptorArray> reresult_descriptors =
- factory()->NewDescriptorArray(3);
+ factory->NewDescriptorArray(3);
reresult_descriptors->CopyFrom(0, *array_descriptors, 0);
int enum_index = 0;
{
- FieldDescriptor index_field(heap()->index_symbol(),
+ FieldDescriptor index_field(heap->index_symbol(),
JSRegExpResult::kIndexIndex,
NONE,
enum_index++);
@@ -1613,7 +1601,7 @@
}
{
- FieldDescriptor input_field(heap()->input_symbol(),
+ FieldDescriptor input_field(heap->input_symbol(),
JSRegExpResult::kInputIndex,
NONE,
enum_index++);
@@ -1638,22 +1626,10 @@
}
-bool Genesis::InstallExperimentalNatives() {
- if (FLAG_harmony_proxies) {
- for (int i = ExperimentalNatives::GetDebuggerCount();
- i < ExperimentalNatives::GetBuiltinsCount();
- i++) {
- if (!CompileExperimentalBuiltin(isolate(), i)) return false;
- }
- }
- return true;
-}
-
-
static Handle<JSObject> ResolveBuiltinIdHolder(
Handle<Context> global_context,
const char* holder_expr) {
- Factory* factory = global_context->GetIsolate()->factory();
+ Factory* factory = Isolate::Current()->factory();
Handle<GlobalObject> global(global_context->global());
const char* period_pos = strchr(holder_expr, '.');
if (period_pos == NULL) {
@@ -1672,8 +1648,7 @@
static void InstallBuiltinFunctionId(Handle<JSObject> holder,
const char* function_name,
BuiltinFunctionId id) {
- Factory* factory = holder->GetIsolate()->factory();
- Handle<String> name = factory->LookupAsciiSymbol(function_name);
+ Handle<String> name = FACTORY->LookupAsciiSymbol(function_name);
Object* function_object = holder->GetProperty(*name)->ToObjectUnchecked();
Handle<JSFunction> function(JSFunction::cast(function_object));
function->shared()->set_function_data(Smi::FromInt(id));
@@ -1700,14 +1675,13 @@
F(16, global_context()->regexp_function())
-static FixedArray* CreateCache(int size, JSFunction* factory_function) {
- Factory* factory = factory_function->GetIsolate()->factory();
+static FixedArray* CreateCache(int size, JSFunction* factory) {
// Caches are supposed to live for a long time, allocate in old space.
int array_size = JSFunctionResultCache::kEntriesIndex + 2 * size;
// Cannot use cast as object is not fully initialized yet.
JSFunctionResultCache* cache = reinterpret_cast<JSFunctionResultCache*>(
- *factory->NewFixedArrayWithHoles(array_size, TENURED));
- cache->set(JSFunctionResultCache::kFactoryIndex, factory_function);
+ *FACTORY->NewFixedArrayWithHoles(array_size, TENURED));
+ cache->set(JSFunctionResultCache::kFactoryIndex, factory);
cache->MakeZeroSize();
return cache;
}
@@ -1746,7 +1720,7 @@
bool Bootstrapper::InstallExtensions(Handle<Context> global_context,
v8::ExtensionConfiguration* extensions) {
- Isolate* isolate = global_context->GetIsolate();
+ Isolate* isolate = Isolate::Current();
BootstrapperActive active;
SaveContext saved_context(isolate);
isolate->set_context(*global_context);
@@ -1757,7 +1731,7 @@
void Genesis::InstallSpecialObjects(Handle<Context> global_context) {
- Factory* factory = global_context->GetIsolate()->factory();
+ Factory* factory = Isolate::Current()->factory();
HandleScope scope;
Handle<JSGlobalObject> js_global(
JSGlobalObject::cast(global_context->global()));
@@ -1893,10 +1867,9 @@
bool Genesis::InstallJSBuiltins(Handle<JSBuiltinsObject> builtins) {
HandleScope scope;
- Factory* factory = builtins->GetIsolate()->factory();
for (int i = 0; i < Builtins::NumberOfJavaScriptBuiltins(); i++) {
Builtins::JavaScript id = static_cast<Builtins::JavaScript>(i);
- Handle<String> name = factory->LookupAsciiSymbol(Builtins::GetName(id));
+ Handle<String> name = FACTORY->LookupAsciiSymbol(Builtins::GetName(id));
Object* function_object = builtins->GetPropertyNoExceptionThrown(*name);
Handle<JSFunction> function
= Handle<JSFunction>(JSFunction::cast(function_object));
@@ -1945,12 +1918,13 @@
ASSERT(object->IsInstanceOf(
FunctionTemplateInfo::cast(object_template->constructor())));
+ Isolate* isolate = Isolate::Current();
bool pending_exception = false;
Handle<JSObject> obj =
Execution::InstantiateObject(object_template, &pending_exception);
if (pending_exception) {
- ASSERT(isolate()->has_pending_exception());
- isolate()->clear_pending_exception();
+ ASSERT(isolate->has_pending_exception());
+ isolate->clear_pending_exception();
return false;
}
TransferObject(obj, object);
@@ -2049,7 +2023,6 @@
void Genesis::TransferObject(Handle<JSObject> from, Handle<JSObject> to) {
HandleScope outer;
- Factory* factory = from->GetIsolate()->factory();
ASSERT(!from->IsJSArray());
ASSERT(!to->IsJSArray());
@@ -2059,7 +2032,7 @@
// Transfer the prototype (new map is needed).
Handle<Map> old_to_map = Handle<Map>(to->map());
- Handle<Map> new_to_map = factory->CopyMapDropTransitions(old_to_map);
+ Handle<Map> new_to_map = FACTORY->CopyMapDropTransitions(old_to_map);
new_to_map->set_prototype(from->map()->prototype());
to->set_map(*new_to_map);
}
@@ -2080,10 +2053,10 @@
}
-Genesis::Genesis(Isolate* isolate,
- Handle<Object> global_object,
+Genesis::Genesis(Handle<Object> global_object,
v8::Handle<v8::ObjectTemplate> global_template,
- v8::ExtensionConfiguration* extensions) : isolate_(isolate) {
+ v8::ExtensionConfiguration* extensions) {
+ Isolate* isolate = Isolate::Current();
result_ = Handle<Context>::null();
// If V8 isn't running and cannot be initialized, just return.
if (!V8::IsRunning() && !V8::Initialize(NULL)) return;
@@ -2113,7 +2086,7 @@
} else {
// We get here if there was no context snapshot.
CreateRoots();
- Handle<JSFunction> empty_function = CreateEmptyFunction(isolate);
+ Handle<JSFunction> empty_function = CreateEmptyFunction();
CreateStrictModeFunctionMaps(empty_function);
Handle<GlobalObject> inner_global;
Handle<JSGlobalProxy> global_proxy =
@@ -2130,9 +2103,6 @@
isolate->counters()->contexts_created_from_scratch()->Increment();
}
- // Install experimental natives.
- if (!InstallExperimentalNatives()) return;
-
result_ = global_context_;
}
diff --git a/src/bootstrapper.h b/src/bootstrapper.h
index 018ceef..3e158d6 100644
--- a/src/bootstrapper.h
+++ b/src/bootstrapper.h
@@ -93,7 +93,6 @@
// Creates a JavaScript Global Context with initial object graph.
// The returned value is a global handle casted to V8Environment*.
Handle<Context> CreateEnvironment(
- Isolate* isolate,
Handle<Object> global_object,
v8::Handle<v8::ObjectTemplate> global_template,
v8::ExtensionConfiguration* extensions);
diff --git a/src/builtins.cc b/src/builtins.cc
index ae3dab4..1846590 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -378,8 +378,7 @@
array_proto = JSObject::cast(proto);
if (array_proto != global_context->initial_object_prototype()) return false;
if (array_proto->elements() != heap->empty_fixed_array()) return false;
- ASSERT(array_proto->GetPrototype()->IsNull());
- return true;
+ return array_proto->GetPrototype()->IsNull();
}
@@ -838,8 +837,8 @@
const int delta = actual_delete_count - item_count;
if (actual_start > 0) {
- Object** start = elms->data_start();
- memmove(start + delta, start, actual_start * kPointerSize);
+ AssertNoAllocation no_gc;
+ MoveElements(heap, &no_gc, elms, delta, elms, 0, actual_start);
}
elms = LeftTrimFixedArray(heap, elms, delta);
diff --git a/src/cpu-profiler.cc b/src/cpu-profiler.cc
index 3894748..10a3360 100644
--- a/src/cpu-profiler.cc
+++ b/src/cpu-profiler.cc
@@ -244,7 +244,7 @@
// A paranoid check to make sure that we don't get a memory overrun
// in case of frames_count having a wild value.
if (record.sample.frames_count < 0
- || record.sample.frames_count >= TickSample::kMaxFramesCount)
+ || record.sample.frames_count > TickSample::kMaxFramesCount)
record.sample.frames_count = 0;
generator_->RecordTickSample(record.sample);
ticks_buffer_.FinishDequeue();
diff --git a/src/d8.gyp b/src/d8.gyp
index 29212dd..901fd65 100644
--- a/src/d8.gyp
+++ b/src/d8.gyp
@@ -61,7 +61,6 @@
'variables': {
'js_files': [
'd8.js',
- 'macros.py',
],
},
'actions': [
@@ -73,6 +72,7 @@
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/d8-js.cc',
+ '<(SHARED_INTERMEDIATE_DIR)/d8-js-empty.cc',
],
'action': [
'python',
diff --git a/src/data-flow.cc b/src/data-flow.cc
index 6a3b05c..79339ed 100644
--- a/src/data-flow.cc
+++ b/src/data-flow.cc
@@ -63,4 +63,477 @@
current_value_ = val >> 1;
}
+
+bool AssignedVariablesAnalyzer::Analyze(CompilationInfo* info) {
+ Scope* scope = info->scope();
+ int size = scope->num_parameters() + scope->num_stack_slots();
+ if (size == 0) return true;
+ AssignedVariablesAnalyzer analyzer(info, size);
+ return analyzer.Analyze();
+}
+
+
+AssignedVariablesAnalyzer::AssignedVariablesAnalyzer(CompilationInfo* info,
+ int size)
+ : info_(info), av_(size) {
+}
+
+
+bool AssignedVariablesAnalyzer::Analyze() {
+ ASSERT(av_.length() > 0);
+ VisitStatements(info_->function()->body());
+ return !HasStackOverflow();
+}
+
+
+Variable* AssignedVariablesAnalyzer::FindSmiLoopVariable(ForStatement* stmt) {
+ // The loop must have all necessary parts.
+ if (stmt->init() == NULL || stmt->cond() == NULL || stmt->next() == NULL) {
+ return NULL;
+ }
+ // The initialization statement has to be a simple assignment.
+ Assignment* init = stmt->init()->StatementAsSimpleAssignment();
+ if (init == NULL) return NULL;
+
+ // We only deal with local variables.
+ Variable* loop_var = init->target()->AsVariableProxy()->AsVariable();
+ if (loop_var == NULL || !loop_var->IsStackAllocated()) return NULL;
+
+ // Don't try to get clever with const or dynamic variables.
+ if (loop_var->mode() != Variable::VAR) return NULL;
+
+ // The initial value has to be a smi.
+ Literal* init_lit = init->value()->AsLiteral();
+ if (init_lit == NULL || !init_lit->handle()->IsSmi()) return NULL;
+ int init_value = Smi::cast(*init_lit->handle())->value();
+
+ // The condition must be a compare of variable with <, <=, >, or >=.
+ CompareOperation* cond = stmt->cond()->AsCompareOperation();
+ if (cond == NULL) return NULL;
+ if (cond->op() != Token::LT
+ && cond->op() != Token::LTE
+ && cond->op() != Token::GT
+ && cond->op() != Token::GTE) return NULL;
+
+ // The lhs must be the same variable as in the init expression.
+ if (cond->left()->AsVariableProxy()->AsVariable() != loop_var) return NULL;
+
+ // The rhs must be a smi.
+ Literal* term_lit = cond->right()->AsLiteral();
+ if (term_lit == NULL || !term_lit->handle()->IsSmi()) return NULL;
+ int term_value = Smi::cast(*term_lit->handle())->value();
+
+ // The count operation updates the same variable as in the init expression.
+ CountOperation* update = stmt->next()->StatementAsCountOperation();
+ if (update == NULL) return NULL;
+ if (update->expression()->AsVariableProxy()->AsVariable() != loop_var) {
+ return NULL;
+ }
+
+ // The direction of the count operation must agree with the start and the end
+ // value. We currently do not allow the initial value to be the same as the
+ // terminal value. This _would_ be ok as long as the loop body never executes
+ // or executes exactly one time.
+ if (init_value == term_value) return NULL;
+ if (init_value < term_value && update->op() != Token::INC) return NULL;
+ if (init_value > term_value && update->op() != Token::DEC) return NULL;
+
+ // Check that the update operation cannot overflow the smi range. This can
+ // occur in the two cases where the loop bound is equal to the largest or
+ // smallest smi.
+ if (update->op() == Token::INC && term_value == Smi::kMaxValue) return NULL;
+ if (update->op() == Token::DEC && term_value == Smi::kMinValue) return NULL;
+
+ // Found a smi loop variable.
+ return loop_var;
+}
+
+int AssignedVariablesAnalyzer::BitIndex(Variable* var) {
+ ASSERT(var != NULL);
+ ASSERT(var->IsStackAllocated());
+ Slot* slot = var->AsSlot();
+ if (slot->type() == Slot::PARAMETER) {
+ return slot->index();
+ } else {
+ return info_->scope()->num_parameters() + slot->index();
+ }
+}
+
+
+void AssignedVariablesAnalyzer::RecordAssignedVar(Variable* var) {
+ ASSERT(var != NULL);
+ if (var->IsStackAllocated()) {
+ av_.Add(BitIndex(var));
+ }
+}
+
+
+void AssignedVariablesAnalyzer::MarkIfTrivial(Expression* expr) {
+ Variable* var = expr->AsVariableProxy()->AsVariable();
+ if (var != NULL &&
+ var->IsStackAllocated() &&
+ !var->is_arguments() &&
+ var->mode() != Variable::CONST &&
+ (var->is_this() || !av_.Contains(BitIndex(var)))) {
+ expr->AsVariableProxy()->MarkAsTrivial();
+ }
+}
+
+
+void AssignedVariablesAnalyzer::ProcessExpression(Expression* expr) {
+ BitVector saved_av(av_);
+ av_.Clear();
+ Visit(expr);
+ av_.Union(saved_av);
+}
+
+void AssignedVariablesAnalyzer::VisitBlock(Block* stmt) {
+ VisitStatements(stmt->statements());
+}
+
+
+void AssignedVariablesAnalyzer::VisitExpressionStatement(
+ ExpressionStatement* stmt) {
+ ProcessExpression(stmt->expression());
+}
+
+
+void AssignedVariablesAnalyzer::VisitEmptyStatement(EmptyStatement* stmt) {
+ // Do nothing.
+}
+
+
+void AssignedVariablesAnalyzer::VisitIfStatement(IfStatement* stmt) {
+ ProcessExpression(stmt->condition());
+ Visit(stmt->then_statement());
+ Visit(stmt->else_statement());
+}
+
+
+void AssignedVariablesAnalyzer::VisitContinueStatement(
+ ContinueStatement* stmt) {
+ // Nothing to do.
+}
+
+
+void AssignedVariablesAnalyzer::VisitBreakStatement(BreakStatement* stmt) {
+ // Nothing to do.
+}
+
+
+void AssignedVariablesAnalyzer::VisitReturnStatement(ReturnStatement* stmt) {
+ ProcessExpression(stmt->expression());
+}
+
+
+void AssignedVariablesAnalyzer::VisitWithEnterStatement(
+ WithEnterStatement* stmt) {
+ ProcessExpression(stmt->expression());
+}
+
+
+void AssignedVariablesAnalyzer::VisitWithExitStatement(
+ WithExitStatement* stmt) {
+ // Nothing to do.
+}
+
+
+void AssignedVariablesAnalyzer::VisitSwitchStatement(SwitchStatement* stmt) {
+ BitVector result(av_);
+ av_.Clear();
+ Visit(stmt->tag());
+ result.Union(av_);
+ for (int i = 0; i < stmt->cases()->length(); i++) {
+ CaseClause* clause = stmt->cases()->at(i);
+ if (!clause->is_default()) {
+ av_.Clear();
+ Visit(clause->label());
+ result.Union(av_);
+ }
+ VisitStatements(clause->statements());
+ }
+ av_.Union(result);
+}
+
+
+void AssignedVariablesAnalyzer::VisitDoWhileStatement(DoWhileStatement* stmt) {
+ ProcessExpression(stmt->cond());
+ Visit(stmt->body());
+}
+
+
+void AssignedVariablesAnalyzer::VisitWhileStatement(WhileStatement* stmt) {
+ ProcessExpression(stmt->cond());
+ Visit(stmt->body());
+}
+
+
+void AssignedVariablesAnalyzer::VisitForStatement(ForStatement* stmt) {
+ if (stmt->init() != NULL) Visit(stmt->init());
+ if (stmt->cond() != NULL) ProcessExpression(stmt->cond());
+ if (stmt->next() != NULL) Visit(stmt->next());
+
+ // Process loop body. After visiting the loop body av_ contains
+ // the assigned variables of the loop body.
+ BitVector saved_av(av_);
+ av_.Clear();
+ Visit(stmt->body());
+
+ Variable* var = FindSmiLoopVariable(stmt);
+ if (var != NULL && !av_.Contains(BitIndex(var))) {
+ stmt->set_loop_variable(var);
+ }
+ av_.Union(saved_av);
+}
+
+
+void AssignedVariablesAnalyzer::VisitForInStatement(ForInStatement* stmt) {
+ ProcessExpression(stmt->each());
+ ProcessExpression(stmt->enumerable());
+ Visit(stmt->body());
+}
+
+
+void AssignedVariablesAnalyzer::VisitTryCatchStatement(
+ TryCatchStatement* stmt) {
+ Visit(stmt->try_block());
+ Visit(stmt->catch_block());
+}
+
+
+void AssignedVariablesAnalyzer::VisitTryFinallyStatement(
+ TryFinallyStatement* stmt) {
+ Visit(stmt->try_block());
+ Visit(stmt->finally_block());
+}
+
+
+void AssignedVariablesAnalyzer::VisitDebuggerStatement(
+ DebuggerStatement* stmt) {
+ // Nothing to do.
+}
+
+
+void AssignedVariablesAnalyzer::VisitFunctionLiteral(FunctionLiteral* expr) {
+ // Nothing to do.
+ ASSERT(av_.IsEmpty());
+}
+
+
+void AssignedVariablesAnalyzer::VisitSharedFunctionInfoLiteral(
+ SharedFunctionInfoLiteral* expr) {
+ // Nothing to do.
+ ASSERT(av_.IsEmpty());
+}
+
+
+void AssignedVariablesAnalyzer::VisitConditional(Conditional* expr) {
+ ASSERT(av_.IsEmpty());
+
+ Visit(expr->condition());
+
+ BitVector result(av_);
+ av_.Clear();
+ Visit(expr->then_expression());
+ result.Union(av_);
+
+ av_.Clear();
+ Visit(expr->else_expression());
+ av_.Union(result);
+}
+
+
+void AssignedVariablesAnalyzer::VisitVariableProxy(VariableProxy* expr) {
+ // Nothing to do.
+ ASSERT(av_.IsEmpty());
+}
+
+
+void AssignedVariablesAnalyzer::VisitLiteral(Literal* expr) {
+ // Nothing to do.
+ ASSERT(av_.IsEmpty());
+}
+
+
+void AssignedVariablesAnalyzer::VisitRegExpLiteral(RegExpLiteral* expr) {
+ // Nothing to do.
+ ASSERT(av_.IsEmpty());
+}
+
+
+void AssignedVariablesAnalyzer::VisitObjectLiteral(ObjectLiteral* expr) {
+ ASSERT(av_.IsEmpty());
+ BitVector result(av_.length());
+ for (int i = 0; i < expr->properties()->length(); i++) {
+ Visit(expr->properties()->at(i)->value());
+ result.Union(av_);
+ av_.Clear();
+ }
+ av_ = result;
+}
+
+
+void AssignedVariablesAnalyzer::VisitArrayLiteral(ArrayLiteral* expr) {
+ ASSERT(av_.IsEmpty());
+ BitVector result(av_.length());
+ for (int i = 0; i < expr->values()->length(); i++) {
+ Visit(expr->values()->at(i));
+ result.Union(av_);
+ av_.Clear();
+ }
+ av_ = result;
+}
+
+
+void AssignedVariablesAnalyzer::VisitCatchExtensionObject(
+ CatchExtensionObject* expr) {
+ ASSERT(av_.IsEmpty());
+ Visit(expr->key());
+ ProcessExpression(expr->value());
+}
+
+
+void AssignedVariablesAnalyzer::VisitAssignment(Assignment* expr) {
+ ASSERT(av_.IsEmpty());
+
+ // There are three kinds of assignments: variable assignments, property
+ // assignments, and reference errors (invalid left-hand sides).
+ Variable* var = expr->target()->AsVariableProxy()->AsVariable();
+ Property* prop = expr->target()->AsProperty();
+ ASSERT(var == NULL || prop == NULL);
+
+ if (var != NULL) {
+ MarkIfTrivial(expr->value());
+ Visit(expr->value());
+ if (expr->is_compound()) {
+ // Left-hand side occurs also as an rvalue.
+ MarkIfTrivial(expr->target());
+ ProcessExpression(expr->target());
+ }
+ RecordAssignedVar(var);
+
+ } else if (prop != NULL) {
+ MarkIfTrivial(expr->value());
+ Visit(expr->value());
+ if (!prop->key()->IsPropertyName()) {
+ MarkIfTrivial(prop->key());
+ ProcessExpression(prop->key());
+ }
+ MarkIfTrivial(prop->obj());
+ ProcessExpression(prop->obj());
+
+ } else {
+ Visit(expr->target());
+ }
+}
+
+
+void AssignedVariablesAnalyzer::VisitThrow(Throw* expr) {
+ ASSERT(av_.IsEmpty());
+ Visit(expr->exception());
+}
+
+
+void AssignedVariablesAnalyzer::VisitProperty(Property* expr) {
+ ASSERT(av_.IsEmpty());
+ if (!expr->key()->IsPropertyName()) {
+ MarkIfTrivial(expr->key());
+ Visit(expr->key());
+ }
+ MarkIfTrivial(expr->obj());
+ ProcessExpression(expr->obj());
+}
+
+
+void AssignedVariablesAnalyzer::VisitCall(Call* expr) {
+ ASSERT(av_.IsEmpty());
+ Visit(expr->expression());
+ BitVector result(av_);
+ for (int i = 0; i < expr->arguments()->length(); i++) {
+ av_.Clear();
+ Visit(expr->arguments()->at(i));
+ result.Union(av_);
+ }
+ av_ = result;
+}
+
+
+void AssignedVariablesAnalyzer::VisitCallNew(CallNew* expr) {
+ ASSERT(av_.IsEmpty());
+ Visit(expr->expression());
+ BitVector result(av_);
+ for (int i = 0; i < expr->arguments()->length(); i++) {
+ av_.Clear();
+ Visit(expr->arguments()->at(i));
+ result.Union(av_);
+ }
+ av_ = result;
+}
+
+
+void AssignedVariablesAnalyzer::VisitCallRuntime(CallRuntime* expr) {
+ ASSERT(av_.IsEmpty());
+ BitVector result(av_);
+ for (int i = 0; i < expr->arguments()->length(); i++) {
+ av_.Clear();
+ Visit(expr->arguments()->at(i));
+ result.Union(av_);
+ }
+ av_ = result;
+}
+
+
+void AssignedVariablesAnalyzer::VisitUnaryOperation(UnaryOperation* expr) {
+ ASSERT(av_.IsEmpty());
+ MarkIfTrivial(expr->expression());
+ Visit(expr->expression());
+}
+
+
+void AssignedVariablesAnalyzer::VisitCountOperation(CountOperation* expr) {
+ ASSERT(av_.IsEmpty());
+ if (expr->is_prefix()) MarkIfTrivial(expr->expression());
+ Visit(expr->expression());
+
+ Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
+ if (var != NULL) RecordAssignedVar(var);
+}
+
+
+void AssignedVariablesAnalyzer::VisitBinaryOperation(BinaryOperation* expr) {
+ ASSERT(av_.IsEmpty());
+ MarkIfTrivial(expr->right());
+ Visit(expr->right());
+ MarkIfTrivial(expr->left());
+ ProcessExpression(expr->left());
+}
+
+
+void AssignedVariablesAnalyzer::VisitCompareOperation(CompareOperation* expr) {
+ ASSERT(av_.IsEmpty());
+ MarkIfTrivial(expr->right());
+ Visit(expr->right());
+ MarkIfTrivial(expr->left());
+ ProcessExpression(expr->left());
+}
+
+
+void AssignedVariablesAnalyzer::VisitCompareToNull(CompareToNull* expr) {
+ ASSERT(av_.IsEmpty());
+ MarkIfTrivial(expr->expression());
+ Visit(expr->expression());
+}
+
+
+void AssignedVariablesAnalyzer::VisitThisFunction(ThisFunction* expr) {
+ // Nothing to do.
+ ASSERT(av_.IsEmpty());
+}
+
+
+void AssignedVariablesAnalyzer::VisitDeclaration(Declaration* decl) {
+ UNREACHABLE();
+}
+
+
} } // namespace v8::internal
diff --git a/src/data-flow.h b/src/data-flow.h
index 76cff88..573d7d8 100644
--- a/src/data-flow.h
+++ b/src/data-flow.h
@@ -335,6 +335,44 @@
List<T*> queue_;
};
+
+// Computes the set of assigned variables and annotates variables proxies
+// that are trivial sub-expressions and for-loops where the loop variable
+// is guaranteed to be a smi.
+class AssignedVariablesAnalyzer : public AstVisitor {
+ public:
+ static bool Analyze(CompilationInfo* info);
+
+ private:
+ AssignedVariablesAnalyzer(CompilationInfo* info, int bits);
+ bool Analyze();
+
+ Variable* FindSmiLoopVariable(ForStatement* stmt);
+
+ int BitIndex(Variable* var);
+
+ void RecordAssignedVar(Variable* var);
+
+ void MarkIfTrivial(Expression* expr);
+
+ // Visits an expression saving the accumulator before, clearing
+ // it before visting and restoring it after visiting.
+ void ProcessExpression(Expression* expr);
+
+ // AST node visit functions.
+#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
+ AST_NODE_LIST(DECLARE_VISIT)
+#undef DECLARE_VISIT
+
+ CompilationInfo* info_;
+
+ // Accumulator for assigned variables set.
+ BitVector av_;
+
+ DISALLOW_COPY_AND_ASSIGN(AssignedVariablesAnalyzer);
+};
+
+
} } // namespace v8::internal
diff --git a/src/debug.cc b/src/debug.cc
index 3691333..093f38e 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -477,6 +477,21 @@
// calling convention used by the call site.
Handle<Code> dbgbrk_code(Debug::FindDebugBreak(code, mode));
rinfo()->set_target_address(dbgbrk_code->entry());
+
+ // For stubs that refer back to an inlined version clear the cached map for
+ // the inlined case to always go through the IC. As long as the break point
+ // is set the patching performed by the runtime system will take place in
+ // the code copy and will therefore have no effect on the running code
+ // keeping it from using the inlined code.
+ if (code->is_keyed_load_stub()) {
+ KeyedLoadIC::ClearInlinedVersion(pc());
+ } else if (code->is_keyed_store_stub()) {
+ KeyedStoreIC::ClearInlinedVersion(pc());
+ } else if (code->is_load_stub()) {
+ LoadIC::ClearInlinedVersion(pc());
+ } else if (code->is_store_stub()) {
+ StoreIC::ClearInlinedVersion(pc());
+ }
}
}
@@ -484,6 +499,20 @@
void BreakLocationIterator::ClearDebugBreakAtIC() {
// Patch the code to the original invoke.
rinfo()->set_target_address(original_rinfo()->target_address());
+
+ RelocInfo::Mode mode = rmode();
+ if (RelocInfo::IsCodeTarget(mode)) {
+ AssertNoAllocation nogc;
+ Address target = original_rinfo()->target_address();
+ Code* code = Code::GetCodeFromTargetAddress(target);
+
+ // Restore the inlined version of keyed stores to get back to the
+ // fast case. We need to patch back the keyed store because no
+ // patching happens when running normally. For keyed loads, the
+ // map check will get patched back when running normally after ICs
+ // have been cleared at GC.
+ if (code->is_keyed_store_stub()) KeyedStoreIC::RestoreInlinedVersion(pc());
+ }
}
@@ -814,7 +843,6 @@
HandleScope scope(isolate_);
Handle<Context> context =
isolate_->bootstrapper()->CreateEnvironment(
- isolate_,
Handle<Object>::null(),
v8::Handle<ObjectTemplate>(),
NULL);
diff --git a/src/extensions/experimental/collator.cc b/src/extensions/experimental/collator.cc
deleted file mode 100644
index 7d1a21d..0000000
--- a/src/extensions/experimental/collator.cc
+++ /dev/null
@@ -1,218 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "collator.h"
-
-#include "unicode/coll.h"
-#include "unicode/locid.h"
-#include "unicode/ucol.h"
-
-namespace v8 {
-namespace internal {
-
-v8::Persistent<v8::FunctionTemplate> Collator::collator_template_;
-
-icu::Collator* Collator::UnpackCollator(v8::Handle<v8::Object> obj) {
- if (collator_template_->HasInstance(obj)) {
- return static_cast<icu::Collator*>(obj->GetPointerFromInternalField(0));
- }
-
- return NULL;
-}
-
-void Collator::DeleteCollator(v8::Persistent<v8::Value> object, void* param) {
- v8::Persistent<v8::Object> persistent_object =
- v8::Persistent<v8::Object>::Cast(object);
-
- // First delete the hidden C++ object.
- // Unpacking should never return NULL here. That would only happen if
- // this method is used as the weak callback for persistent handles not
- // pointing to a collator.
- delete UnpackCollator(persistent_object);
-
- // Then dispose of the persistent handle to JS object.
- persistent_object.Dispose();
-}
-
-// Throws a JavaScript exception.
-static v8::Handle<v8::Value> ThrowUnexpectedObjectError() {
- // Returns undefined, and schedules an exception to be thrown.
- return v8::ThrowException(v8::Exception::Error(
- v8::String::New("Collator method called on an object "
- "that is not a Collator.")));
-}
-
-// Extract a boolean option named in |option| and set it to |result|.
-// Return true if it's specified. Otherwise, return false.
-static bool ExtractBooleanOption(const v8::Local<v8::Object>& options,
- const char* option,
- bool* result) {
- v8::HandleScope handle_scope;
- v8::TryCatch try_catch;
- v8::Handle<v8::Value> value = options->Get(v8::String::New(option));
- if (try_catch.HasCaught()) {
- return false;
- }
- // No need to check if |value| is empty because it's taken care of
- // by TryCatch above.
- if (!value->IsUndefined() && !value->IsNull()) {
- if (value->IsBoolean()) {
- *result = value->BooleanValue();
- return true;
- }
- }
- return false;
-}
-
-// When there's an ICU error, throw a JavaScript error with |message|.
-static v8::Handle<v8::Value> ThrowExceptionForICUError(const char* message) {
- return v8::ThrowException(v8::Exception::Error(v8::String::New(message)));
-}
-
-v8::Handle<v8::Value> Collator::CollatorCompare(const v8::Arguments& args) {
- if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsString()) {
- return v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Two string arguments are required.")));
- }
-
- icu::Collator* collator = UnpackCollator(args.Holder());
- if (!collator) {
- return ThrowUnexpectedObjectError();
- }
-
- v8::String::Value string_value1(args[0]);
- v8::String::Value string_value2(args[1]);
- const UChar* string1 = reinterpret_cast<const UChar*>(*string_value1);
- const UChar* string2 = reinterpret_cast<const UChar*>(*string_value2);
- UErrorCode status = U_ZERO_ERROR;
- UCollationResult result = collator->compare(
- string1, string_value1.length(), string2, string_value2.length(), status);
-
- if (U_FAILURE(status)) {
- return ThrowExceptionForICUError(
- "Unexpected failure in Collator.compare.");
- }
-
- return v8::Int32::New(result);
-}
-
-v8::Handle<v8::Value> Collator::JSCollator(const v8::Arguments& args) {
- v8::HandleScope handle_scope;
-
- if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsObject()) {
- return v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Locale and collation options are required.")));
- }
-
- v8::String::AsciiValue locale(args[0]);
- icu::Locale icu_locale(*locale);
-
- icu::Collator* collator = NULL;
- UErrorCode status = U_ZERO_ERROR;
- collator = icu::Collator::createInstance(icu_locale, status);
-
- if (U_FAILURE(status)) {
- delete collator;
- return ThrowExceptionForICUError("Failed to create collator.");
- }
-
- v8::Local<v8::Object> options(args[1]->ToObject());
-
- // Below, we change collation options that are explicitly specified
- // by a caller in JavaScript. Otherwise, we don't touch because
- // we don't want to change the locale-dependent default value.
- // The three options below are very likely to have the same default
- // across locales, but I haven't checked them all. Others we may add
- // in the future have certainly locale-dependent default (e.g.
- // caseFirst is upperFirst for Danish while is off for most other locales).
-
- bool ignore_case, ignore_accents, numeric;
-
- if (ExtractBooleanOption(options, "ignoreCase", &ignore_case)) {
- collator->setAttribute(UCOL_CASE_LEVEL, ignore_case ? UCOL_OFF : UCOL_ON,
- status);
- if (U_FAILURE(status)) {
- delete collator;
- return ThrowExceptionForICUError("Failed to set ignoreCase.");
- }
- }
-
- // Accents are taken into account with strength secondary or higher.
- if (ExtractBooleanOption(options, "ignoreAccents", &ignore_accents)) {
- if (!ignore_accents) {
- collator->setStrength(icu::Collator::SECONDARY);
- } else {
- collator->setStrength(icu::Collator::PRIMARY);
- }
- }
-
- if (ExtractBooleanOption(options, "numeric", &numeric)) {
- collator->setAttribute(UCOL_NUMERIC_COLLATION,
- numeric ? UCOL_ON : UCOL_OFF, status);
- if (U_FAILURE(status)) {
- delete collator;
- return ThrowExceptionForICUError("Failed to set numeric sort option.");
- }
- }
-
- if (collator_template_.IsEmpty()) {
- v8::Local<v8::FunctionTemplate> raw_template(v8::FunctionTemplate::New());
- raw_template->SetClassName(v8::String::New("v8Locale.Collator"));
-
- // Define internal field count on instance template.
- v8::Local<v8::ObjectTemplate> object_template =
- raw_template->InstanceTemplate();
-
- // Set aside internal fields for icu collator.
- object_template->SetInternalFieldCount(1);
-
- // Define all of the prototype methods on prototype template.
- v8::Local<v8::ObjectTemplate> proto = raw_template->PrototypeTemplate();
- proto->Set(v8::String::New("compare"),
- v8::FunctionTemplate::New(CollatorCompare));
-
- collator_template_ =
- v8::Persistent<v8::FunctionTemplate>::New(raw_template);
- }
-
- // Create an empty object wrapper.
- v8::Local<v8::Object> local_object =
- collator_template_->GetFunction()->NewInstance();
- v8::Persistent<v8::Object> wrapper =
- v8::Persistent<v8::Object>::New(local_object);
-
- // Set collator as internal field of the resulting JS object.
- wrapper->SetPointerInInternalField(0, collator);
-
- // Make object handle weak so we can delete iterator once GC kicks in.
- wrapper.MakeWeak(NULL, DeleteCollator);
-
- return wrapper;
-}
-
-} } // namespace v8::internal
-
diff --git a/src/extensions/experimental/collator.h b/src/extensions/experimental/collator.h
deleted file mode 100644
index 10d6ffb..0000000
--- a/src/extensions/experimental/collator.h
+++ /dev/null
@@ -1,69 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef V8_EXTENSIONS_EXPERIMENTAL_COLLATOR_H
-#define V8_EXTENSIONS_EXPERIMENTAL_COLLATOR_H_
-
-#include <v8.h>
-
-#include "unicode/uversion.h"
-
-namespace U_ICU_NAMESPACE {
-class Collator;
-class UnicodeString;
-}
-
-namespace v8 {
-namespace internal {
-
-class Collator {
- public:
- static v8::Handle<v8::Value> JSCollator(const v8::Arguments& args);
-
- // Helper methods for various bindings.
-
- // Unpacks collator object from corresponding JavaScript object.
- static icu::Collator* UnpackCollator(v8::Handle<v8::Object> obj);
-
- // Release memory we allocated for the Collator once the JS object that
- // holds the pointer gets garbage collected.
- static void DeleteCollator(v8::Persistent<v8::Value> object, void* param);
-
- // Compare two strings and returns -1, 0 and 1 depending on
- // whether string1 is smaller than, equal to or larger than string2.
- static v8::Handle<v8::Value> CollatorCompare(const v8::Arguments& args);
-
- private:
- Collator() {}
-
- static v8::Persistent<v8::FunctionTemplate> collator_template_;
-};
-
-} } // namespace v8::internal
-
-#endif // V8_EXTENSIONS_EXPERIMENTAL_COLLATOR
-
diff --git a/src/extensions/experimental/experimental.gyp b/src/extensions/experimental/experimental.gyp
index d1194ce..a8585fd 100644
--- a/src/extensions/experimental/experimental.gyp
+++ b/src/extensions/experimental/experimental.gyp
@@ -39,13 +39,9 @@
'sources': [
'break-iterator.cc',
'break-iterator.h',
- 'collator.cc',
- 'collator.h',
'i18n-extension.cc',
'i18n-extension.h',
- 'i18n-locale.cc',
- 'i18n-locale.h',
- '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc',
+ '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc',
],
'include_dirs': [
'<(icu_src_dir)/public/common',
@@ -53,7 +49,7 @@
],
'dependencies': [
'<(icu_src_dir)/icu.gyp:*',
- 'js2c_i18n#host',
+ 'js2c_i18n#host',
'../../../tools/gyp/v8.gyp:v8',
],
},
@@ -63,27 +59,28 @@
'toolsets': ['host'],
'variables': {
'library_files': [
- 'i18n.js'
- ],
+ 'i18n.js'
+ ],
},
'actions': [
{
- 'action_name': 'js2c_i18n',
- 'inputs': [
- '../../../tools/js2c.py',
- '<@(library_files)',
- ],
- 'outputs': [
- '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc',
- ],
- 'action': [
- 'python',
- '../../../tools/js2c.py',
- '<@(_outputs)',
- 'I18N',
- '<@(library_files)'
- ],
- },
+ 'action_name': 'js2c_i18n',
+ 'inputs': [
+ '../../../tools/js2c.py',
+ '<@(library_files)',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc',
+ '<(SHARED_INTERMEDIATE_DIR)/i18n-js-empty.cc'
+ ],
+ 'action': [
+ 'python',
+ '../../../tools/js2c.py',
+ '<@(_outputs)',
+ 'I18N',
+ '<@(library_files)'
+ ],
+ },
],
},
], # targets
diff --git a/src/extensions/experimental/i18n-extension.cc b/src/extensions/experimental/i18n-extension.cc
index 56bea23..6e3ab15 100644
--- a/src/extensions/experimental/i18n-extension.cc
+++ b/src/extensions/experimental/i18n-extension.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -27,10 +27,13 @@
#include "i18n-extension.h"
+#include <algorithm>
+#include <string>
+
#include "break-iterator.h"
-#include "collator.h"
-#include "i18n-locale.h"
#include "natives.h"
+#include "unicode/locid.h"
+#include "unicode/uloc.h"
namespace v8 {
namespace internal {
@@ -54,30 +57,166 @@
v8::Handle<v8::FunctionTemplate> I18NExtension::GetNativeFunction(
v8::Handle<v8::String> name) {
if (name->Equals(v8::String::New("NativeJSLocale"))) {
- return v8::FunctionTemplate::New(I18NLocale::JSLocale);
+ return v8::FunctionTemplate::New(JSLocale);
} else if (name->Equals(v8::String::New("NativeJSAvailableLocales"))) {
- return v8::FunctionTemplate::New(I18NLocale::JSAvailableLocales);
+ return v8::FunctionTemplate::New(JSAvailableLocales);
} else if (name->Equals(v8::String::New("NativeJSMaximizedLocale"))) {
- return v8::FunctionTemplate::New(I18NLocale::JSMaximizedLocale);
+ return v8::FunctionTemplate::New(JSMaximizedLocale);
} else if (name->Equals(v8::String::New("NativeJSMinimizedLocale"))) {
- return v8::FunctionTemplate::New(I18NLocale::JSMinimizedLocale);
+ return v8::FunctionTemplate::New(JSMinimizedLocale);
} else if (name->Equals(v8::String::New("NativeJSDisplayLanguage"))) {
- return v8::FunctionTemplate::New(I18NLocale::JSDisplayLanguage);
+ return v8::FunctionTemplate::New(JSDisplayLanguage);
} else if (name->Equals(v8::String::New("NativeJSDisplayScript"))) {
- return v8::FunctionTemplate::New(I18NLocale::JSDisplayScript);
+ return v8::FunctionTemplate::New(JSDisplayScript);
} else if (name->Equals(v8::String::New("NativeJSDisplayRegion"))) {
- return v8::FunctionTemplate::New(I18NLocale::JSDisplayRegion);
+ return v8::FunctionTemplate::New(JSDisplayRegion);
} else if (name->Equals(v8::String::New("NativeJSDisplayName"))) {
- return v8::FunctionTemplate::New(I18NLocale::JSDisplayName);
+ return v8::FunctionTemplate::New(JSDisplayName);
} else if (name->Equals(v8::String::New("NativeJSBreakIterator"))) {
return v8::FunctionTemplate::New(BreakIterator::JSBreakIterator);
- } else if (name->Equals(v8::String::New("NativeJSCollator"))) {
- return v8::FunctionTemplate::New(Collator::JSCollator);
}
return v8::Handle<v8::FunctionTemplate>();
}
+v8::Handle<v8::Value> I18NExtension::JSLocale(const v8::Arguments& args) {
+ // TODO(cira): Fetch browser locale. Accept en-US as good default for now.
+ // We could possibly pass browser locale as a parameter in the constructor.
+ std::string locale_name("en-US");
+ if (args.Length() == 1 && args[0]->IsString()) {
+ locale_name = *v8::String::Utf8Value(args[0]->ToString());
+ }
+
+ v8::Local<v8::Object> locale = v8::Object::New();
+ locale->Set(v8::String::New("locale"), v8::String::New(locale_name.c_str()));
+
+ icu::Locale icu_locale(locale_name.c_str());
+
+ const char* language = icu_locale.getLanguage();
+ locale->Set(v8::String::New("language"), v8::String::New(language));
+
+ const char* script = icu_locale.getScript();
+ if (strlen(script)) {
+ locale->Set(v8::String::New("script"), v8::String::New(script));
+ }
+
+ const char* region = icu_locale.getCountry();
+ if (strlen(region)) {
+ locale->Set(v8::String::New("region"), v8::String::New(region));
+ }
+
+ return locale;
+}
+
+// TODO(cira): Filter out locales that Chrome doesn't support.
+v8::Handle<v8::Value> I18NExtension::JSAvailableLocales(
+ const v8::Arguments& args) {
+ v8::Local<v8::Array> all_locales = v8::Array::New();
+
+ int count = 0;
+ const icu::Locale* icu_locales = icu::Locale::getAvailableLocales(count);
+ for (int i = 0; i < count; ++i) {
+ all_locales->Set(i, v8::String::New(icu_locales[i].getName()));
+ }
+
+ return all_locales;
+}
+
+// Use - as tag separator, not _ that ICU uses.
+static std::string NormalizeLocale(const std::string& locale) {
+ std::string result(locale);
+ // TODO(cira): remove STL dependency.
+ std::replace(result.begin(), result.end(), '_', '-');
+ return result;
+}
+
+v8::Handle<v8::Value> I18NExtension::JSMaximizedLocale(
+ const v8::Arguments& args) {
+ if (!args.Length() || !args[0]->IsString()) {
+ return v8::Undefined();
+ }
+
+ UErrorCode status = U_ZERO_ERROR;
+ std::string locale_name = *v8::String::Utf8Value(args[0]->ToString());
+ char max_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_addLikelySubtags(locale_name.c_str(), max_locale,
+ sizeof(max_locale), &status);
+ if (U_FAILURE(status)) {
+ return v8::Undefined();
+ }
+
+ return v8::String::New(NormalizeLocale(max_locale).c_str());
+}
+
+v8::Handle<v8::Value> I18NExtension::JSMinimizedLocale(
+ const v8::Arguments& args) {
+ if (!args.Length() || !args[0]->IsString()) {
+ return v8::Undefined();
+ }
+
+ UErrorCode status = U_ZERO_ERROR;
+ std::string locale_name = *v8::String::Utf8Value(args[0]->ToString());
+ char min_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_minimizeSubtags(locale_name.c_str(), min_locale,
+ sizeof(min_locale), &status);
+ if (U_FAILURE(status)) {
+ return v8::Undefined();
+ }
+
+ return v8::String::New(NormalizeLocale(min_locale).c_str());
+}
+
+// Common code for JSDisplayXXX methods.
+static v8::Handle<v8::Value> GetDisplayItem(const v8::Arguments& args,
+ const std::string& item) {
+ if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsString()) {
+ return v8::Undefined();
+ }
+
+ std::string base_locale = *v8::String::Utf8Value(args[0]->ToString());
+ icu::Locale icu_locale(base_locale.c_str());
+ icu::Locale display_locale =
+ icu::Locale(*v8::String::Utf8Value(args[1]->ToString()));
+ icu::UnicodeString result;
+ if (item == "language") {
+ icu_locale.getDisplayLanguage(display_locale, result);
+ } else if (item == "script") {
+ icu_locale.getDisplayScript(display_locale, result);
+ } else if (item == "region") {
+ icu_locale.getDisplayCountry(display_locale, result);
+ } else if (item == "name") {
+ icu_locale.getDisplayName(display_locale, result);
+ } else {
+ return v8::Undefined();
+ }
+
+ if (result.length()) {
+ return v8::String::New(
+ reinterpret_cast<const uint16_t*>(result.getBuffer()), result.length());
+ }
+
+ return v8::Undefined();
+}
+
+v8::Handle<v8::Value> I18NExtension::JSDisplayLanguage(
+ const v8::Arguments& args) {
+ return GetDisplayItem(args, "language");
+}
+
+v8::Handle<v8::Value> I18NExtension::JSDisplayScript(
+ const v8::Arguments& args) {
+ return GetDisplayItem(args, "script");
+}
+
+v8::Handle<v8::Value> I18NExtension::JSDisplayRegion(
+ const v8::Arguments& args) {
+ return GetDisplayItem(args, "region");
+}
+
+v8::Handle<v8::Value> I18NExtension::JSDisplayName(const v8::Arguments& args) {
+ return GetDisplayItem(args, "name");
+}
+
I18NExtension* I18NExtension::get() {
if (!extension_) {
extension_ = new I18NExtension();
diff --git a/src/extensions/experimental/i18n-extension.h b/src/extensions/experimental/i18n-extension.h
index b4dc7c3..54c973f 100644
--- a/src/extensions/experimental/i18n-extension.h
+++ b/src/extensions/experimental/i18n-extension.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -41,6 +41,16 @@
virtual v8::Handle<v8::FunctionTemplate> GetNativeFunction(
v8::Handle<v8::String> name);
+ // Implementations of window.Locale methods.
+ static v8::Handle<v8::Value> JSLocale(const v8::Arguments& args);
+ static v8::Handle<v8::Value> JSAvailableLocales(const v8::Arguments& args);
+ static v8::Handle<v8::Value> JSMaximizedLocale(const v8::Arguments& args);
+ static v8::Handle<v8::Value> JSMinimizedLocale(const v8::Arguments& args);
+ static v8::Handle<v8::Value> JSDisplayLanguage(const v8::Arguments& args);
+ static v8::Handle<v8::Value> JSDisplayScript(const v8::Arguments& args);
+ static v8::Handle<v8::Value> JSDisplayRegion(const v8::Arguments& args);
+ static v8::Handle<v8::Value> JSDisplayName(const v8::Arguments& args);
+
// V8 code prefers Register, while Chrome and WebKit use get kind of methods.
static void Register();
static I18NExtension* get();
diff --git a/src/extensions/experimental/i18n-locale.cc b/src/extensions/experimental/i18n-locale.cc
deleted file mode 100644
index e5e1cf8..0000000
--- a/src/extensions/experimental/i18n-locale.cc
+++ /dev/null
@@ -1,172 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "i18n-locale.h"
-
-#include <algorithm>
-#include <string>
-
-#include "unicode/locid.h"
-#include "unicode/uloc.h"
-
-namespace v8 {
-namespace internal {
-
-v8::Handle<v8::Value> I18NLocale::JSLocale(const v8::Arguments& args) {
- // TODO(cira): Fetch browser locale. Accept en-US as good default for now.
- // We could possibly pass browser locale as a parameter in the constructor.
- std::string locale_name("en-US");
- if (args.Length() == 1 && args[0]->IsString()) {
- locale_name = *v8::String::Utf8Value(args[0]->ToString());
- }
-
- v8::Local<v8::Object> locale = v8::Object::New();
- locale->Set(v8::String::New("locale"), v8::String::New(locale_name.c_str()));
-
- icu::Locale icu_locale(locale_name.c_str());
-
- const char* language = icu_locale.getLanguage();
- locale->Set(v8::String::New("language"), v8::String::New(language));
-
- const char* script = icu_locale.getScript();
- if (strlen(script)) {
- locale->Set(v8::String::New("script"), v8::String::New(script));
- }
-
- const char* region = icu_locale.getCountry();
- if (strlen(region)) {
- locale->Set(v8::String::New("region"), v8::String::New(region));
- }
-
- return locale;
-}
-
-// TODO(cira): Filter out locales that Chrome doesn't support.
-v8::Handle<v8::Value> I18NLocale::JSAvailableLocales(
- const v8::Arguments& args) {
- v8::Local<v8::Array> all_locales = v8::Array::New();
-
- int count = 0;
- const icu::Locale* icu_locales = icu::Locale::getAvailableLocales(count);
- for (int i = 0; i < count; ++i) {
- all_locales->Set(i, v8::String::New(icu_locales[i].getName()));
- }
-
- return all_locales;
-}
-
-// Use - as tag separator, not _ that ICU uses.
-static std::string NormalizeLocale(const std::string& locale) {
- std::string result(locale);
- // TODO(cira): remove STL dependency.
- std::replace(result.begin(), result.end(), '_', '-');
- return result;
-}
-
-v8::Handle<v8::Value> I18NLocale::JSMaximizedLocale(const v8::Arguments& args) {
- if (!args.Length() || !args[0]->IsString()) {
- return v8::Undefined();
- }
-
- UErrorCode status = U_ZERO_ERROR;
- std::string locale_name = *v8::String::Utf8Value(args[0]->ToString());
- char max_locale[ULOC_FULLNAME_CAPACITY];
- uloc_addLikelySubtags(locale_name.c_str(), max_locale,
- sizeof(max_locale), &status);
- if (U_FAILURE(status)) {
- return v8::Undefined();
- }
-
- return v8::String::New(NormalizeLocale(max_locale).c_str());
-}
-
-v8::Handle<v8::Value> I18NLocale::JSMinimizedLocale(const v8::Arguments& args) {
- if (!args.Length() || !args[0]->IsString()) {
- return v8::Undefined();
- }
-
- UErrorCode status = U_ZERO_ERROR;
- std::string locale_name = *v8::String::Utf8Value(args[0]->ToString());
- char min_locale[ULOC_FULLNAME_CAPACITY];
- uloc_minimizeSubtags(locale_name.c_str(), min_locale,
- sizeof(min_locale), &status);
- if (U_FAILURE(status)) {
- return v8::Undefined();
- }
-
- return v8::String::New(NormalizeLocale(min_locale).c_str());
-}
-
-// Common code for JSDisplayXXX methods.
-static v8::Handle<v8::Value> GetDisplayItem(const v8::Arguments& args,
- const std::string& item) {
- if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsString()) {
- return v8::Undefined();
- }
-
- std::string base_locale = *v8::String::Utf8Value(args[0]->ToString());
- icu::Locale icu_locale(base_locale.c_str());
- icu::Locale display_locale =
- icu::Locale(*v8::String::Utf8Value(args[1]->ToString()));
- icu::UnicodeString result;
- if (item == "language") {
- icu_locale.getDisplayLanguage(display_locale, result);
- } else if (item == "script") {
- icu_locale.getDisplayScript(display_locale, result);
- } else if (item == "region") {
- icu_locale.getDisplayCountry(display_locale, result);
- } else if (item == "name") {
- icu_locale.getDisplayName(display_locale, result);
- } else {
- return v8::Undefined();
- }
-
- if (result.length()) {
- return v8::String::New(
- reinterpret_cast<const uint16_t*>(result.getBuffer()), result.length());
- }
-
- return v8::Undefined();
-}
-
-v8::Handle<v8::Value> I18NLocale::JSDisplayLanguage(const v8::Arguments& args) {
- return GetDisplayItem(args, "language");
-}
-
-v8::Handle<v8::Value> I18NLocale::JSDisplayScript(const v8::Arguments& args) {
- return GetDisplayItem(args, "script");
-}
-
-v8::Handle<v8::Value> I18NLocale::JSDisplayRegion(const v8::Arguments& args) {
- return GetDisplayItem(args, "region");
-}
-
-v8::Handle<v8::Value> I18NLocale::JSDisplayName(const v8::Arguments& args) {
- return GetDisplayItem(args, "name");
-}
-
-} } // namespace v8::internal
diff --git a/src/extensions/experimental/i18n-locale.h b/src/extensions/experimental/i18n-locale.h
deleted file mode 100644
index aa9adbe..0000000
--- a/src/extensions/experimental/i18n-locale.h
+++ /dev/null
@@ -1,53 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef V8_EXTENSIONS_EXPERIMENTAL_I18N_LOCALE_H_
-#define V8_EXTENSIONS_EXPERIMENTAL_I18N_LOCALE_H_
-
-#include <v8.h>
-
-namespace v8 {
-namespace internal {
-
-class I18NLocale {
- public:
- I18NLocale() {}
-
- // Implementations of window.Locale methods.
- static v8::Handle<v8::Value> JSLocale(const v8::Arguments& args);
- static v8::Handle<v8::Value> JSAvailableLocales(const v8::Arguments& args);
- static v8::Handle<v8::Value> JSMaximizedLocale(const v8::Arguments& args);
- static v8::Handle<v8::Value> JSMinimizedLocale(const v8::Arguments& args);
- static v8::Handle<v8::Value> JSDisplayLanguage(const v8::Arguments& args);
- static v8::Handle<v8::Value> JSDisplayScript(const v8::Arguments& args);
- static v8::Handle<v8::Value> JSDisplayRegion(const v8::Arguments& args);
- static v8::Handle<v8::Value> JSDisplayName(const v8::Arguments& args);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_EXTENSIONS_EXPERIMENTAL_I18N_LOCALE_H_
diff --git a/src/extensions/experimental/i18n.js b/src/extensions/experimental/i18n.js
index 5a74905..baf3859 100644
--- a/src/extensions/experimental/i18n.js
+++ b/src/extensions/experimental/i18n.js
@@ -101,16 +101,3 @@
v8Locale.prototype.v8CreateBreakIterator = function(type) {
return new v8Locale.v8BreakIterator(this.locale, type);
};
-
-// TODO(jungshik): Set |collator.options| to actually recognized / resolved
-// values.
-v8Locale.Collator = function(locale, options) {
- native function NativeJSCollator();
- var collator = NativeJSCollator(locale,
- options === undefined ? {} : options);
- return collator;
-};
-
-v8Locale.prototype.createCollator = function(options) {
- return new v8Locale.Collator(this.locale, options);
-};
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 69139bb..17e2015 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -96,9 +96,6 @@
//
#define FLAG FLAG_FULL
-// Flags for experimental language features.
-DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
-
// Flags for Crankshaft.
#ifdef V8_TARGET_ARCH_MIPS
DEFINE_bool(crankshaft, false, "use crankshaft")
@@ -223,6 +220,8 @@
// compilation-cache.cc
DEFINE_bool(compilation_cache, true, "enable compilation cache")
+DEFINE_bool(cache_prototype_transitions, true, "cache prototype transitions")
+
// data-flow.cc
DEFINE_bool(loop_peeling, false, "Peel off the first iteration of loops.")
diff --git a/src/frames.h b/src/frames.h
index da9009b..6fe6a63 100644
--- a/src/frames.h
+++ b/src/frames.h
@@ -151,6 +151,12 @@
NO_ID = 0
};
+ // Used to mark the outermost JS entry frame.
+ enum JsFrameMarker {
+ INNER_JSENTRY_FRAME = 0,
+ OUTERMOST_JSENTRY_FRAME = 1
+ };
+
struct State {
State() : sp(NULL), fp(NULL), pc_address(NULL) { }
Address sp;
diff --git a/src/heap.cc b/src/heap.cc
index 93bdc03..0b4abf3 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -127,6 +127,7 @@
global_gc_prologue_callback_(NULL),
global_gc_epilogue_callback_(NULL),
gc_safe_size_of_old_object_(NULL),
+ total_regexp_code_generated_(0),
tracer_(NULL),
young_survivors_after_last_gc_(0),
high_survival_rate_period_length_(0),
@@ -1596,6 +1597,7 @@
map->set_pre_allocated_property_fields(0);
map->set_instance_descriptors(empty_descriptor_array());
map->set_code_cache(empty_fixed_array());
+ map->set_prototype_transitions(empty_fixed_array());
map->set_unused_property_fields(0);
map->set_bit_field(0);
map->set_bit_field2((1 << Map::kIsExtensible) | (1 << Map::kHasFastElements));
@@ -1688,12 +1690,15 @@
// Fix the instance_descriptors for the existing maps.
meta_map()->set_instance_descriptors(empty_descriptor_array());
meta_map()->set_code_cache(empty_fixed_array());
+ meta_map()->set_prototype_transitions(empty_fixed_array());
fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
fixed_array_map()->set_code_cache(empty_fixed_array());
+ fixed_array_map()->set_prototype_transitions(empty_fixed_array());
oddball_map()->set_instance_descriptors(empty_descriptor_array());
oddball_map()->set_code_cache(empty_fixed_array());
+ oddball_map()->set_prototype_transitions(empty_fixed_array());
// Fix prototype object for existing maps.
meta_map()->set_prototype(null_value());
diff --git a/src/heap.h b/src/heap.h
index 7a1bed3..ae4e9e7 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -1210,6 +1210,11 @@
GCTracer* tracer() { return tracer_; }
+ double total_regexp_code_generated() { return total_regexp_code_generated_; }
+ void IncreaseTotalRegexpCodeGenerated(int size) {
+ total_regexp_code_generated_ += size;
+ }
+
// Returns maximum GC pause.
int get_max_gc_pause() { return max_gc_pause_; }
@@ -1493,6 +1498,9 @@
SharedFunctionInfo* shared,
Object* prototype);
+ // Total RegExp code ever generated
+ double total_regexp_code_generated_;
+
GCTracer* tracer_;
diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc
index 3b01f57..032ca76 100644
--- a/src/hydrogen-instructions.cc
+++ b/src/hydrogen-instructions.cc
@@ -1017,9 +1017,10 @@
HConstant::HConstant(Handle<Object> handle, Representation r)
: handle_(handle),
+ constant_type_(HType::TypeFromValue(handle)),
has_int32_value_(false),
- has_double_value_(false),
int32_value_(0),
+ has_double_value_(false),
double_value_(0) {
set_representation(r);
SetFlag(kUseGVN);
@@ -1434,7 +1435,7 @@
HType HConstant::CalculateInferredType() {
- return HType::TypeFromValue(handle_);
+ return constant_type_;
}
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index e32a09c..a623775 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -114,7 +114,6 @@
V(HasCachedArrayIndex) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
- V(InvokeFunction) \
V(IsNull) \
V(IsObject) \
V(IsSmi) \
@@ -156,7 +155,6 @@
V(StoreKeyedGeneric) \
V(StoreNamedField) \
V(StoreNamedGeneric) \
- V(StringAdd) \
V(StringCharCodeAt) \
V(StringCharFromCode) \
V(StringLength) \
@@ -1245,23 +1243,6 @@
};
-class HInvokeFunction: public HBinaryCall {
- public:
- HInvokeFunction(HValue* context, HValue* function, int argument_count)
- : HBinaryCall(context, function, argument_count) {
- }
-
- virtual Representation RequiredInputRepresentation(int index) const {
- return Representation::Tagged();
- }
-
- HValue* context() { return first(); }
- HValue* function() { return second(); }
-
- DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke_function")
-};
-
-
class HCallConstantFunction: public HCall<0> {
public:
HCallConstantFunction(Handle<JSFunction> function, int argument_count)
@@ -1726,16 +1707,6 @@
virtual void Verify();
#endif
- virtual HValue* Canonicalize() {
- if (!value()->type().IsUninitialized() &&
- value()->type().IsString() &&
- first() == FIRST_STRING_TYPE &&
- last() == LAST_STRING_TYPE) {
- return NULL;
- }
- return this;
- }
-
static HCheckInstanceType* NewIsJSObjectOrJSFunction(HValue* value);
InstanceType first() const { return first_; }
@@ -1777,18 +1748,6 @@
virtual void Verify();
#endif
- virtual HValue* Canonicalize() {
- HType value_type = value()->type();
- if (!value_type.IsUninitialized() &&
- (value_type.IsHeapNumber() ||
- value_type.IsString() ||
- value_type.IsBoolean() ||
- value_type.IsNonPrimitive())) {
- return NULL;
- }
- return this;
- }
-
DECLARE_CONCRETE_INSTRUCTION(CheckNonSmi, "check_non_smi")
protected:
@@ -2031,13 +1990,14 @@
private:
Handle<Object> handle_;
+ HType constant_type_;
// The following two values represent the int32 and the double value of the
// given constant if there is a lossless conversion between the constant
// and the specific representation.
- bool has_int32_value_ : 1;
- bool has_double_value_ : 1;
+ bool has_int32_value_;
int32_t int32_value_;
+ bool has_double_value_;
double double_value_;
};
@@ -3448,29 +3408,6 @@
};
-class HStringAdd: public HBinaryOperation {
- public:
- HStringAdd(HValue* left, HValue* right) : HBinaryOperation(left, right) {
- set_representation(Representation::Tagged());
- SetFlag(kUseGVN);
- SetFlag(kDependsOnMaps);
- }
-
- virtual Representation RequiredInputRepresentation(int index) const {
- return Representation::Tagged();
- }
-
- virtual HType CalculateInferredType() {
- return HType::String();
- }
-
- DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string_add")
-
- protected:
- virtual bool DataEquals(HValue* other) { return true; }
-};
-
-
class HStringCharCodeAt: public HBinaryOperation {
public:
HStringCharCodeAt(HValue* string, HValue* index)
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index f6c47f3..73ea97d 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -242,7 +242,7 @@
void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
- if (HasPredecessor()) {
+ if (!predecessors_.is_empty()) {
// Only loop header blocks can have a predecessor added after
// instructions have been added to the block (they have phis for all
// values in the environment, these phis may be eliminated later).
@@ -1808,7 +1808,7 @@
ZoneList<Representation>* to_convert_reps) {
Representation r = current->representation();
if (r.IsNone()) return;
- if (current->uses()->is_empty()) return;
+ if (current->uses()->length() == 0) return;
// Collect the representation changes in a sorted list. This allows
// us to avoid duplicate changes without searching the list.
@@ -2076,17 +2076,37 @@
// HGraphBuilder infrastructure for bailing out and checking bailouts.
-#define CHECK_BAILOUT(call) \
+#define BAILOUT(reason) \
do { \
- call; \
+ Bailout(reason); \
+ return; \
+ } while (false)
+
+
+#define CHECK_BAILOUT \
+ do { \
if (HasStackOverflow()) return; \
} while (false)
-#define CHECK_ALIVE(call) \
+#define VISIT_FOR_EFFECT(expr) \
+ do { \
+ VisitForEffect(expr); \
+ if (HasStackOverflow()) return; \
+ } while (false)
+
+
+#define VISIT_FOR_VALUE(expr) \
+ do { \
+ VisitForValue(expr); \
+ if (HasStackOverflow()) return; \
+ } while (false)
+
+
+#define VISIT_FOR_CONTROL(expr, true_block, false_block) \
do { \
- call; \
- if (HasStackOverflow() || current_block() == NULL) return; \
+ VisitForControl(expr, true_block, false_block); \
+ if (HasStackOverflow()) return; \
} while (false)
@@ -2128,21 +2148,22 @@
void HGraphBuilder::VisitArgument(Expression* expr) {
- CHECK_ALIVE(VisitForValue(expr));
+ VISIT_FOR_VALUE(expr);
Push(AddInstruction(new(zone()) HPushArgument(Pop())));
}
void HGraphBuilder::VisitArgumentList(ZoneList<Expression*>* arguments) {
for (int i = 0; i < arguments->length(); i++) {
- CHECK_ALIVE(VisitArgument(arguments->at(i)));
+ VisitArgument(arguments->at(i));
+ if (HasStackOverflow() || current_block() == NULL) return;
}
}
void HGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs) {
for (int i = 0; i < exprs->length(); ++i) {
- CHECK_ALIVE(VisitForValue(exprs->at(i)));
+ VISIT_FOR_VALUE(exprs->at(i));
}
}
@@ -2273,7 +2294,7 @@
void HGraphBuilder::SetupScope(Scope* scope) {
// We don't yet handle the function name for named function expressions.
- if (scope->function() != NULL) return Bailout("named function expression");
+ if (scope->function() != NULL) BAILOUT("named function expression");
HConstant* undefined_constant = new(zone()) HConstant(
isolate()->factory()->undefined_value(), Representation::Tagged());
@@ -2299,7 +2320,7 @@
if (!scope->arguments()->IsStackAllocated() ||
(scope->arguments_shadow() != NULL &&
!scope->arguments_shadow()->IsStackAllocated())) {
- return Bailout("context-allocated arguments");
+ BAILOUT("context-allocated arguments");
}
HArgumentsObject* object = new(zone()) HArgumentsObject;
AddInstruction(object);
@@ -2314,7 +2335,8 @@
void HGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
for (int i = 0; i < statements->length(); i++) {
- CHECK_ALIVE(Visit(statements->at(i)));
+ Visit(statements->at(i));
+ if (HasStackOverflow() || current_block() == NULL) break;
}
}
@@ -2336,12 +2358,10 @@
void HGraphBuilder::VisitBlock(Block* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
BreakAndContinueInfo break_info(stmt);
{ BreakAndContinueScope push(&break_info, this);
- CHECK_BAILOUT(VisitStatements(stmt->statements()));
+ VisitStatements(stmt->statements());
+ CHECK_BAILOUT;
}
HBasicBlock* break_block = break_info.break_block();
if (break_block != NULL) {
@@ -2353,24 +2373,15 @@
void HGraphBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
VisitForEffect(stmt->expression());
}
void HGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
}
void HGraphBuilder::VisitIfStatement(IfStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
if (stmt->condition()->ToBooleanIsTrue()) {
AddSimulate(stmt->ThenId());
Visit(stmt->then_statement());
@@ -2380,27 +2391,20 @@
} else {
HBasicBlock* cond_true = graph()->CreateBasicBlock();
HBasicBlock* cond_false = graph()->CreateBasicBlock();
- CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
+ VISIT_FOR_CONTROL(stmt->condition(), cond_true, cond_false);
+ cond_true->SetJoinId(stmt->ThenId());
+ cond_false->SetJoinId(stmt->ElseId());
- if (cond_true->HasPredecessor()) {
- cond_true->SetJoinId(stmt->ThenId());
- set_current_block(cond_true);
- CHECK_BAILOUT(Visit(stmt->then_statement()));
- cond_true = current_block();
- } else {
- cond_true = NULL;
- }
+ set_current_block(cond_true);
+ Visit(stmt->then_statement());
+ CHECK_BAILOUT;
+ HBasicBlock* other = current_block();
- if (cond_false->HasPredecessor()) {
- cond_false->SetJoinId(stmt->ElseId());
- set_current_block(cond_false);
- CHECK_BAILOUT(Visit(stmt->else_statement()));
- cond_false = current_block();
- } else {
- cond_false = NULL;
- }
+ set_current_block(cond_false);
+ Visit(stmt->else_statement());
+ CHECK_BAILOUT;
- HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->id());
+ HBasicBlock* join = CreateJoin(other, current_block(), stmt->id());
set_current_block(join);
}
}
@@ -2438,9 +2442,6 @@
void HGraphBuilder::VisitContinueStatement(ContinueStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
HBasicBlock* continue_block = break_scope()->Get(stmt->target(), CONTINUE);
current_block()->Goto(continue_block);
set_current_block(NULL);
@@ -2448,9 +2449,6 @@
void HGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
HBasicBlock* break_block = break_scope()->Get(stmt->target(), BREAK);
current_block()->Goto(break_block);
set_current_block(NULL);
@@ -2458,13 +2456,10 @@
void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
AstContext* context = call_context();
if (context == NULL) {
// Not an inlined return, so an actual one.
- CHECK_ALIVE(VisitForValue(stmt->expression()));
+ VISIT_FOR_VALUE(stmt->expression());
HValue* result = environment()->Pop();
current_block()->FinishExit(new(zone()) HReturn(result));
set_current_block(NULL);
@@ -2477,11 +2472,11 @@
test->if_true(),
test->if_false());
} else if (context->IsEffect()) {
- CHECK_ALIVE(VisitForEffect(stmt->expression()));
+ VISIT_FOR_EFFECT(stmt->expression());
current_block()->Goto(function_return(), false);
} else {
ASSERT(context->IsValue());
- CHECK_ALIVE(VisitForValue(stmt->expression()));
+ VISIT_FOR_VALUE(stmt->expression());
HValue* return_value = environment()->Pop();
current_block()->AddLeaveInlined(return_value, function_return());
}
@@ -2491,35 +2486,26 @@
void HGraphBuilder::VisitWithEnterStatement(WithEnterStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
- return Bailout("WithEnterStatement");
+ BAILOUT("WithEnterStatement");
}
void HGraphBuilder::VisitWithExitStatement(WithExitStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
- return Bailout("WithExitStatement");
+ BAILOUT("WithExitStatement");
}
void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
// We only optimize switch statements with smi-literal smi comparisons,
// with a bounded number of clauses.
const int kCaseClauseLimit = 128;
ZoneList<CaseClause*>* clauses = stmt->cases();
int clause_count = clauses->length();
if (clause_count > kCaseClauseLimit) {
- return Bailout("SwitchStatement: too many clauses");
+ BAILOUT("SwitchStatement: too many clauses");
}
- CHECK_ALIVE(VisitForValue(stmt->tag()));
+ VISIT_FOR_VALUE(stmt->tag());
AddSimulate(stmt->EntryId());
HValue* tag_value = Pop();
HBasicBlock* first_test_block = current_block();
@@ -2530,7 +2516,7 @@
CaseClause* clause = clauses->at(i);
if (clause->is_default()) continue;
if (!clause->label()->IsSmiLiteral()) {
- return Bailout("SwitchStatement: non-literal switch label");
+ BAILOUT("SwitchStatement: non-literal switch label");
}
// Unconditionally deoptimize on the first non-smi compare.
@@ -2542,7 +2528,7 @@
}
// Otherwise generate a compare and branch.
- CHECK_ALIVE(VisitForValue(clause->label()));
+ VISIT_FOR_VALUE(clause->label());
HValue* label_value = Pop();
HCompare* compare =
new(zone()) HCompare(tag_value, label_value, Token::EQ_STRICT);
@@ -2572,9 +2558,11 @@
// Identify the block where normal (non-fall-through) control flow
// goes to.
HBasicBlock* normal_block = NULL;
- if (clause->is_default() && last_block != NULL) {
- normal_block = last_block;
- last_block = NULL; // Cleared to indicate we've handled it.
+ if (clause->is_default()) {
+ if (last_block != NULL) {
+ normal_block = last_block;
+ last_block = NULL; // Cleared to indicate we've handled it.
+ }
} else if (!curr_test_block->end()->IsDeoptimize()) {
normal_block = curr_test_block->end()->FirstSuccessor();
curr_test_block = curr_test_block->end()->SecondSuccessor();
@@ -2604,7 +2592,8 @@
set_current_block(join);
}
- CHECK_BAILOUT(VisitStatements(clause->statements()));
+ VisitStatements(clause->statements());
+ CHECK_BAILOUT;
fall_through_block = current_block();
}
}
@@ -2664,9 +2653,6 @@
void HGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
ASSERT(current_block() != NULL);
PreProcessOsrEntry(stmt);
HBasicBlock* loop_entry = CreateLoopHeaderBlock();
@@ -2675,7 +2661,8 @@
BreakAndContinueInfo break_info(stmt);
{ BreakAndContinueScope push(&break_info, this);
- CHECK_BAILOUT(Visit(stmt->body()));
+ Visit(stmt->body());
+ CHECK_BAILOUT;
}
HBasicBlock* body_exit =
JoinContinue(stmt, current_block(), break_info.continue_block());
@@ -2686,17 +2673,9 @@
// back edge.
body_exit = graph()->CreateBasicBlock();
loop_successor = graph()->CreateBasicBlock();
- CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
- if (body_exit->HasPredecessor()) {
- body_exit->SetJoinId(stmt->BackEdgeId());
- } else {
- body_exit = NULL;
- }
- if (loop_successor->HasPredecessor()) {
- loop_successor->SetJoinId(stmt->ExitId());
- } else {
- loop_successor = NULL;
- }
+ VISIT_FOR_CONTROL(stmt->cond(), body_exit, loop_successor);
+ body_exit->SetJoinId(stmt->BackEdgeId());
+ loop_successor->SetJoinId(stmt->ExitId());
}
HBasicBlock* loop_exit = CreateLoop(stmt,
loop_entry,
@@ -2708,9 +2687,6 @@
void HGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
ASSERT(current_block() != NULL);
PreProcessOsrEntry(stmt);
HBasicBlock* loop_entry = CreateLoopHeaderBlock();
@@ -2722,22 +2698,16 @@
if (!stmt->cond()->ToBooleanIsTrue()) {
HBasicBlock* body_entry = graph()->CreateBasicBlock();
loop_successor = graph()->CreateBasicBlock();
- CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
- if (body_entry->HasPredecessor()) {
- body_entry->SetJoinId(stmt->BodyId());
- set_current_block(body_entry);
- }
- if (loop_successor->HasPredecessor()) {
- loop_successor->SetJoinId(stmt->ExitId());
- } else {
- loop_successor = NULL;
- }
+ VISIT_FOR_CONTROL(stmt->cond(), body_entry, loop_successor);
+ body_entry->SetJoinId(stmt->BodyId());
+ loop_successor->SetJoinId(stmt->ExitId());
+ set_current_block(body_entry);
}
BreakAndContinueInfo break_info(stmt);
- if (current_block() != NULL) {
- BreakAndContinueScope push(&break_info, this);
- CHECK_BAILOUT(Visit(stmt->body()));
+ { BreakAndContinueScope push(&break_info, this);
+ Visit(stmt->body());
+ CHECK_BAILOUT;
}
HBasicBlock* body_exit =
JoinContinue(stmt, current_block(), break_info.continue_block());
@@ -2751,11 +2721,9 @@
void HGraphBuilder::VisitForStatement(ForStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
if (stmt->init() != NULL) {
- CHECK_ALIVE(Visit(stmt->init()));
+ Visit(stmt->init());
+ CHECK_BAILOUT;
}
ASSERT(current_block() != NULL);
PreProcessOsrEntry(stmt);
@@ -2767,29 +2735,24 @@
if (stmt->cond() != NULL) {
HBasicBlock* body_entry = graph()->CreateBasicBlock();
loop_successor = graph()->CreateBasicBlock();
- CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
- if (body_entry->HasPredecessor()) {
- body_entry->SetJoinId(stmt->BodyId());
- set_current_block(body_entry);
- }
- if (loop_successor->HasPredecessor()) {
- loop_successor->SetJoinId(stmt->ExitId());
- } else {
- loop_successor = NULL;
- }
+ VISIT_FOR_CONTROL(stmt->cond(), body_entry, loop_successor);
+ body_entry->SetJoinId(stmt->BodyId());
+ loop_successor->SetJoinId(stmt->ExitId());
+ set_current_block(body_entry);
}
BreakAndContinueInfo break_info(stmt);
- if (current_block() != NULL) {
- BreakAndContinueScope push(&break_info, this);
- CHECK_BAILOUT(Visit(stmt->body()));
+ { BreakAndContinueScope push(&break_info, this);
+ Visit(stmt->body());
+ CHECK_BAILOUT;
}
HBasicBlock* body_exit =
JoinContinue(stmt, current_block(), break_info.continue_block());
if (stmt->next() != NULL && body_exit != NULL) {
set_current_block(body_exit);
- CHECK_BAILOUT(Visit(stmt->next()));
+ Visit(stmt->next());
+ CHECK_BAILOUT;
body_exit = current_block();
}
@@ -2803,34 +2766,22 @@
void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
- return Bailout("ForInStatement");
+ BAILOUT("ForInStatement");
}
void HGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
- return Bailout("TryCatchStatement");
+ BAILOUT("TryCatchStatement");
}
void HGraphBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
- return Bailout("TryFinallyStatement");
+ BAILOUT("TryFinallyStatement");
}
void HGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
- return Bailout("DebuggerStatement");
+ BAILOUT("DebuggerStatement");
}
@@ -2855,17 +2806,13 @@
void HGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
Handle<SharedFunctionInfo> shared_info =
SearchSharedFunctionInfo(info()->shared_info()->code(),
expr);
if (shared_info.is_null()) {
shared_info = Compiler::BuildFunctionInfo(expr, info()->script());
}
- // We also have a stack overflow if the recursive compilation did.
- if (HasStackOverflow()) return;
+ CHECK_BAILOUT;
HFunctionLiteral* instr =
new(zone()) HFunctionLiteral(shared_info, expr->pretenure());
ast_context()->ReturnInstruction(instr, expr->id());
@@ -2874,47 +2821,32 @@
void HGraphBuilder::VisitSharedFunctionInfoLiteral(
SharedFunctionInfoLiteral* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
- return Bailout("SharedFunctionInfoLiteral");
+ BAILOUT("SharedFunctionInfoLiteral");
}
void HGraphBuilder::VisitConditional(Conditional* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
HBasicBlock* cond_true = graph()->CreateBasicBlock();
HBasicBlock* cond_false = graph()->CreateBasicBlock();
- CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
+ VISIT_FOR_CONTROL(expr->condition(), cond_true, cond_false);
+ cond_true->SetJoinId(expr->ThenId());
+ cond_false->SetJoinId(expr->ElseId());
// Visit the true and false subexpressions in the same AST context as the
// whole expression.
- if (cond_true->HasPredecessor()) {
- cond_true->SetJoinId(expr->ThenId());
- set_current_block(cond_true);
- CHECK_BAILOUT(Visit(expr->then_expression()));
- cond_true = current_block();
- } else {
- cond_true = NULL;
- }
+ set_current_block(cond_true);
+ Visit(expr->then_expression());
+ CHECK_BAILOUT;
+ HBasicBlock* other = current_block();
- if (cond_false->HasPredecessor()) {
- cond_false->SetJoinId(expr->ElseId());
- set_current_block(cond_false);
- CHECK_BAILOUT(Visit(expr->else_expression()));
- cond_false = current_block();
- } else {
- cond_false = NULL;
- }
+ set_current_block(cond_false);
+ Visit(expr->else_expression());
+ CHECK_BAILOUT;
if (!ast_context()->IsTest()) {
- HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
+ HBasicBlock* join = CreateJoin(other, current_block(), expr->id());
set_current_block(join);
- if (join != NULL && !ast_context()->IsEffect()) {
- ast_context()->ReturnValue(Pop());
- }
+ if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
}
}
@@ -2951,20 +2883,17 @@
void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
Variable* variable = expr->AsVariable();
if (variable == NULL) {
- return Bailout("reference to rewritten variable");
+ BAILOUT("reference to rewritten variable");
} else if (variable->IsStackAllocated()) {
if (environment()->Lookup(variable)->CheckFlag(HValue::kIsArguments)) {
- return Bailout("unsupported context for arguments object");
+ BAILOUT("unsupported context for arguments object");
}
ast_context()->ReturnValue(environment()->Lookup(variable));
} else if (variable->IsContextSlot()) {
if (variable->mode() == Variable::CONST) {
- return Bailout("reference to const context slot");
+ BAILOUT("reference to const context slot");
}
HValue* context = BuildContextChainWalk(variable);
int index = variable->AsSlot()->index();
@@ -3000,15 +2929,12 @@
ast_context()->ReturnInstruction(instr, expr->id());
}
} else {
- return Bailout("reference to a variable which requires dynamic lookup");
+ BAILOUT("reference to a variable which requires dynamic lookup");
}
}
void HGraphBuilder::VisitLiteral(Literal* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
HConstant* instr =
new(zone()) HConstant(expr->handle(), Representation::Tagged());
ast_context()->ReturnInstruction(instr, expr->id());
@@ -3016,9 +2942,6 @@
void HGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
HRegExpLiteral* instr = new(zone()) HRegExpLiteral(expr->pattern(),
expr->flags(),
expr->literal_index());
@@ -3027,9 +2950,6 @@
void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
HContext* context = new(zone()) HContext;
AddInstruction(context);
HObjectLiteral* literal =
@@ -3059,7 +2979,7 @@
case ObjectLiteral::Property::COMPUTED:
if (key->handle()->IsSymbol()) {
if (property->emit_store()) {
- CHECK_ALIVE(VisitForValue(value));
+ VISIT_FOR_VALUE(value);
HValue* value = Pop();
Handle<String> name = Handle<String>::cast(key->handle());
HStoreNamedGeneric* store =
@@ -3072,7 +2992,7 @@
AddInstruction(store);
AddSimulate(key->id());
} else {
- CHECK_ALIVE(VisitForEffect(value));
+ VISIT_FOR_EFFECT(value);
}
break;
}
@@ -3080,7 +3000,7 @@
case ObjectLiteral::Property::PROTOTYPE:
case ObjectLiteral::Property::SETTER:
case ObjectLiteral::Property::GETTER:
- return Bailout("Object literal with complex property");
+ BAILOUT("Object literal with complex property");
default: UNREACHABLE();
}
}
@@ -3101,9 +3021,6 @@
void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
ZoneList<Expression*>* subexprs = expr->values();
int length = subexprs->length();
@@ -3123,9 +3040,9 @@
// is already set in the cloned array.
if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
- CHECK_ALIVE(VisitForValue(subexpr));
+ VISIT_FOR_VALUE(subexpr);
HValue* value = Pop();
- if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal");
+ if (!Smi::IsValid(i)) BAILOUT("Non-smi key in array literal");
// Load the elements array before the first store.
if (elements == NULL) {
@@ -3144,10 +3061,7 @@
void HGraphBuilder::VisitCatchExtensionObject(CatchExtensionObject* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
- return Bailout("CatchExtensionObject");
+ BAILOUT("CatchExtensionObject");
}
@@ -3326,14 +3240,14 @@
Property* prop = expr->target()->AsProperty();
ASSERT(prop != NULL);
expr->RecordTypeFeedback(oracle());
- CHECK_ALIVE(VisitForValue(prop->obj()));
+ VISIT_FOR_VALUE(prop->obj());
HValue* value = NULL;
HInstruction* instr = NULL;
if (prop->key()->IsPropertyName()) {
// Named store.
- CHECK_ALIVE(VisitForValue(expr->value()));
+ VISIT_FOR_VALUE(expr->value());
value = Pop();
HValue* object = Pop();
@@ -3357,8 +3271,8 @@
} else {
// Keyed store.
- CHECK_ALIVE(VisitForValue(prop->key()));
- CHECK_ALIVE(VisitForValue(expr->value()));
+ VISIT_FOR_VALUE(prop->key());
+ VISIT_FOR_VALUE(expr->value());
value = Pop();
HValue* key = Pop();
HValue* object = Pop();
@@ -3420,7 +3334,7 @@
BinaryOperation* operation = expr->binary_operation();
if (var != NULL) {
- CHECK_ALIVE(VisitForValue(operation));
+ VISIT_FOR_VALUE(operation);
if (var->is_global()) {
HandleGlobalVariableAssignment(var,
@@ -3437,7 +3351,7 @@
AddInstruction(instr);
if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
} else {
- return Bailout("compound assignment to lookup slot");
+ BAILOUT("compound assignment to lookup slot");
}
ast_context()->ReturnValue(Pop());
@@ -3446,7 +3360,7 @@
if (prop->key()->IsPropertyName()) {
// Named property.
- CHECK_ALIVE(VisitForValue(prop->obj()));
+ VISIT_FOR_VALUE(prop->obj());
HValue* obj = Top();
HInstruction* load = NULL;
@@ -3460,7 +3374,7 @@
PushAndAdd(load);
if (load->HasSideEffects()) AddSimulate(expr->CompoundLoadId());
- CHECK_ALIVE(VisitForValue(expr->value()));
+ VISIT_FOR_VALUE(expr->value());
HValue* right = Pop();
HValue* left = Pop();
@@ -3478,8 +3392,8 @@
} else {
// Keyed property.
- CHECK_ALIVE(VisitForValue(prop->obj()));
- CHECK_ALIVE(VisitForValue(prop->key()));
+ VISIT_FOR_VALUE(prop->obj());
+ VISIT_FOR_VALUE(prop->key());
HValue* obj = environment()->ExpressionStackAt(1);
HValue* key = environment()->ExpressionStackAt(0);
@@ -3487,7 +3401,7 @@
PushAndAdd(load);
if (load->HasSideEffects()) AddSimulate(expr->CompoundLoadId());
- CHECK_ALIVE(VisitForValue(expr->value()));
+ VISIT_FOR_VALUE(expr->value());
HValue* right = Pop();
HValue* left = Pop();
@@ -3506,15 +3420,12 @@
}
} else {
- return Bailout("invalid lhs in compound assignment");
+ BAILOUT("invalid lhs in compound assignment");
}
}
void HGraphBuilder::VisitAssignment(Assignment* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
VariableProxy* proxy = expr->target()->AsVariableProxy();
Variable* var = proxy->AsVariable();
Property* prop = expr->target()->AsProperty();
@@ -3526,7 +3437,7 @@
}
if (var != NULL) {
- if (proxy->IsArguments()) return Bailout("assignment to arguments");
+ if (proxy->IsArguments()) BAILOUT("assignment to arguments");
// Handle the assignment.
if (var->IsStackAllocated()) {
@@ -3540,14 +3451,14 @@
if (rhs_var != NULL && rhs_var->IsStackAllocated()) {
value = environment()->Lookup(rhs_var);
} else {
- CHECK_ALIVE(VisitForValue(expr->value()));
+ VISIT_FOR_VALUE(expr->value());
value = Pop();
}
Bind(var, value);
ast_context()->ReturnValue(value);
} else if (var->IsContextSlot() && var->mode() != Variable::CONST) {
- CHECK_ALIVE(VisitForValue(expr->value()));
+ VISIT_FOR_VALUE(expr->value());
HValue* context = BuildContextChainWalk(var);
int index = var->AsSlot()->index();
HStoreContextSlot* instr =
@@ -3557,7 +3468,7 @@
ast_context()->ReturnValue(Pop());
} else if (var->is_global()) {
- CHECK_ALIVE(VisitForValue(expr->value()));
+ VISIT_FOR_VALUE(expr->value());
HandleGlobalVariableAssignment(var,
Top(),
expr->position(),
@@ -3565,26 +3476,23 @@
ast_context()->ReturnValue(Pop());
} else {
- return Bailout("assignment to LOOKUP or const CONTEXT variable");
+ BAILOUT("assignment to LOOKUP or const CONTEXT variable");
}
} else if (prop != NULL) {
HandlePropertyAssignment(expr);
} else {
- return Bailout("invalid left-hand side in assignment");
+ BAILOUT("invalid left-hand side in assignment");
}
}
void HGraphBuilder::VisitThrow(Throw* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
// We don't optimize functions with invalid left-hand sides in
// assignments, count operations, or for-in. Consequently throw can
// currently only occur in an effect context.
ASSERT(ast_context()->IsEffect());
- CHECK_ALIVE(VisitForValue(expr->exception()));
+ VISIT_FOR_VALUE(expr->exception());
HValue* value = environment()->Pop();
HThrow* instr = new(zone()) HThrow(value);
@@ -3832,7 +3740,7 @@
} else {
Push(graph()->GetArgumentsObject());
VisitForValue(expr->key());
- if (HasStackOverflow() || current_block() == NULL) return true;
+ if (HasStackOverflow()) return false;
HValue* key = Pop();
Drop(1); // Arguments object.
HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
@@ -3847,14 +3755,12 @@
void HGraphBuilder::VisitProperty(Property* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
expr->RecordTypeFeedback(oracle());
if (TryArgumentsAccess(expr)) return;
+ CHECK_BAILOUT;
- CHECK_ALIVE(VisitForValue(expr->obj()));
+ VISIT_FOR_VALUE(expr->obj());
HInstruction* instr = NULL;
if (expr->IsArrayLength()) {
@@ -3873,7 +3779,7 @@
LAST_STRING_TYPE));
instr = new(zone()) HStringLength(string);
} else if (expr->IsStringAccess()) {
- CHECK_ALIVE(VisitForValue(expr->key()));
+ VISIT_FOR_VALUE(expr->key());
HValue* index = Pop();
HValue* string = Pop();
HStringCharCodeAt* char_code = BuildStringCharCodeAt(string, index);
@@ -3900,7 +3806,7 @@
}
} else {
- CHECK_ALIVE(VisitForValue(expr->key()));
+ VISIT_FOR_VALUE(expr->key());
HValue* key = Pop();
HValue* obj = Pop();
@@ -3961,11 +3867,10 @@
PrintF("Trying to inline the polymorphic call to %s\n",
*name->ToCString());
}
- if (FLAG_polymorphic_inlining && TryInline(expr)) {
- // Trying to inline will signal that we should bailout from the
- // entire compilation by setting stack overflow on the visitor.
- if (HasStackOverflow()) return;
- } else {
+ if (!FLAG_polymorphic_inlining || !TryInline(expr)) {
+ // Check for bailout, as trying to inline might fail due to bailout
+ // during hydrogen processing.
+ CHECK_BAILOUT;
HCallConstantFunction* call =
new(zone()) HCallConstantFunction(expr->target(), argument_count);
call->set_position(expr->position());
@@ -4005,12 +3910,10 @@
// even without predecessors to the join block, we set it as the exit
// block and continue by adding instructions there.
ASSERT(join != NULL);
+ set_current_block(join);
if (join->HasPredecessor()) {
- set_current_block(join);
join->SetJoinId(expr->id());
if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
- } else {
- set_current_block(NULL);
}
}
@@ -4182,7 +4085,7 @@
// Bail out if the inline function did, as we cannot residualize a call
// instead.
TraceInline(target, "inline graph construction failed");
- return true;
+ return false;
}
// Update inlined nodes count.
@@ -4238,11 +4141,9 @@
// flow to handle.
set_current_block(NULL);
- } else if (function_return()->HasPredecessor()) {
+ } else {
function_return()->SetJoinId(expr->id());
set_current_block(function_return());
- } else {
- set_current_block(NULL);
}
return true;
@@ -4368,10 +4269,10 @@
// Found pattern f.apply(receiver, arguments).
VisitForValue(prop->obj());
- if (HasStackOverflow() || current_block() == NULL) return true;
+ if (HasStackOverflow()) return false;
HValue* function = Pop();
VisitForValue(args->at(0));
- if (HasStackOverflow() || current_block() == NULL) return true;
+ if (HasStackOverflow()) return false;
HValue* receiver = Pop();
HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
@@ -4388,9 +4289,6 @@
void HGraphBuilder::VisitCall(Call* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
Expression* callee = expr->expression();
int argument_count = expr->arguments()->length() + 1; // Plus receiver.
HInstruction* call = NULL;
@@ -4399,16 +4297,17 @@
if (prop != NULL) {
if (!prop->key()->IsPropertyName()) {
// Keyed function call.
- CHECK_ALIVE(VisitForValue(prop->obj()));
+ VISIT_FOR_VALUE(prop->obj());
- CHECK_ALIVE(VisitForValue(prop->key()));
+ VISIT_FOR_VALUE(prop->key());
// Push receiver and key like the non-optimized code generator expects it.
HValue* key = Pop();
HValue* receiver = Pop();
Push(key);
Push(receiver);
- CHECK_ALIVE(VisitExpressions(expr->arguments()));
+ VisitExpressions(expr->arguments());
+ CHECK_BAILOUT;
HContext* context = new(zone()) HContext;
AddInstruction(context);
@@ -4424,9 +4323,11 @@
expr->RecordTypeFeedback(oracle());
if (TryCallApply(expr)) return;
+ CHECK_BAILOUT;
- CHECK_ALIVE(VisitForValue(prop->obj()));
- CHECK_ALIVE(VisitExpressions(expr->arguments()));
+ VISIT_FOR_VALUE(prop->obj());
+ VisitExpressions(expr->arguments());
+ CHECK_BAILOUT;
Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
@@ -4457,10 +4358,16 @@
} else {
AddCheckConstantFunction(expr, receiver, receiver_map, true);
- if (TryInline(expr)) return;
- call = PreProcessCall(
- new(zone()) HCallConstantFunction(expr->target(),
- argument_count));
+ if (TryInline(expr)) {
+ return;
+ } else {
+ // Check for bailout, as the TryInline call in the if condition above
+ // might return false due to bailout during hydrogen processing.
+ CHECK_BAILOUT;
+ call = PreProcessCall(
+ new(zone()) HCallConstantFunction(expr->target(),
+ argument_count));
+ }
}
} else if (types != NULL && types->length() > 1) {
ASSERT(expr->check_type() == RECEIVER_MAP_CHECK);
@@ -4480,7 +4387,7 @@
if (!global_call) {
++argument_count;
- CHECK_ALIVE(VisitForValue(expr->expression()));
+ VISIT_FOR_VALUE(expr->expression());
}
if (global_call) {
@@ -4502,9 +4409,10 @@
HGlobalObject* global_object = new(zone()) HGlobalObject(context);
AddInstruction(context);
PushAndAdd(global_object);
- CHECK_ALIVE(VisitExpressions(expr->arguments()));
+ VisitExpressions(expr->arguments());
+ CHECK_BAILOUT;
- CHECK_ALIVE(VisitForValue(expr->expression()));
+ VISIT_FOR_VALUE(expr->expression());
HValue* function = Pop();
AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
@@ -4518,14 +4426,21 @@
IsGlobalObject());
environment()->SetExpressionStackAt(receiver_index, global_receiver);
- if (TryInline(expr)) return;
+ if (TryInline(expr)) {
+ return;
+ }
+ // Check for bailout, as trying to inline might fail due to bailout
+ // during hydrogen processing.
+ CHECK_BAILOUT;
+
call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
- argument_count));
+ argument_count));
} else {
HContext* context = new(zone()) HContext;
AddInstruction(context);
PushAndAdd(new(zone()) HGlobalObject(context));
- CHECK_ALIVE(VisitExpressions(expr->arguments()));
+ VisitExpressions(expr->arguments());
+ CHECK_BAILOUT;
call = PreProcessCall(new(zone()) HCallGlobal(context,
var->name(),
@@ -4538,7 +4453,8 @@
AddInstruction(context);
AddInstruction(global_object);
PushAndAdd(new(zone()) HGlobalReceiver(global_object));
- CHECK_ALIVE(VisitExpressions(expr->arguments()));
+ VisitExpressions(expr->arguments());
+ CHECK_BAILOUT;
call = PreProcessCall(new(zone()) HCallFunction(context, argument_count));
}
@@ -4550,13 +4466,11 @@
void HGraphBuilder::VisitCallNew(CallNew* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
// The constructor function is also used as the receiver argument to the
// JS construct call builtin.
- CHECK_ALIVE(VisitForValue(expr->expression()));
- CHECK_ALIVE(VisitExpressions(expr->arguments()));
+ VISIT_FOR_VALUE(expr->expression());
+ VisitExpressions(expr->arguments());
+ CHECK_BAILOUT;
HContext* context = new(zone()) HContext;
AddInstruction(context);
@@ -4588,11 +4502,8 @@
void HGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
if (expr->is_jsruntime()) {
- return Bailout("call to a JavaScript runtime function");
+ BAILOUT("call to a JavaScript runtime function");
}
const Runtime::Function* function = expr->function();
@@ -4612,7 +4523,8 @@
(this->*generator)(expr);
} else {
ASSERT(function->intrinsic_type == Runtime::RUNTIME);
- CHECK_ALIVE(VisitArgumentList(expr->arguments()));
+ VisitArgumentList(expr->arguments());
+ CHECK_BAILOUT;
Handle<String> name = expr->name();
int argument_count = expr->arguments()->length();
@@ -4626,12 +4538,9 @@
void HGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
Token::Value op = expr->op();
if (op == Token::VOID) {
- CHECK_ALIVE(VisitForEffect(expr->expression()));
+ VISIT_FOR_EFFECT(expr->expression());
ast_context()->ReturnValue(graph()->GetConstantUndefined());
} else if (op == Token::DELETE) {
Property* prop = expr->expression()->AsProperty();
@@ -4639,7 +4548,7 @@
if (prop == NULL && var == NULL) {
// Result of deleting non-property, non-variable reference is true.
// Evaluate the subexpression for side effects.
- CHECK_ALIVE(VisitForEffect(expr->expression()));
+ VISIT_FOR_EFFECT(expr->expression());
ast_context()->ReturnValue(graph()->GetConstantTrue());
} else if (var != NULL &&
!var->is_global() &&
@@ -4654,17 +4563,17 @@
// to accesses on the arguments object.
ast_context()->ReturnValue(graph()->GetConstantFalse());
} else {
- CHECK_ALIVE(VisitForValue(prop->obj()));
- CHECK_ALIVE(VisitForValue(prop->key()));
+ VISIT_FOR_VALUE(prop->obj());
+ VISIT_FOR_VALUE(prop->key());
HValue* key = Pop();
HValue* obj = Pop();
HDeleteProperty* instr = new(zone()) HDeleteProperty(obj, key);
ast_context()->ReturnInstruction(instr, expr->id());
}
} else if (var->is_global()) {
- return Bailout("delete with global variable");
+ BAILOUT("delete with global variable");
} else {
- return Bailout("delete with non-global variable");
+ BAILOUT("delete with non-global variable");
}
} else if (op == Token::NOT) {
if (ast_context()->IsTest()) {
@@ -4675,42 +4584,34 @@
} else if (ast_context()->IsValue()) {
HBasicBlock* materialize_false = graph()->CreateBasicBlock();
HBasicBlock* materialize_true = graph()->CreateBasicBlock();
- CHECK_BAILOUT(VisitForControl(expr->expression(),
- materialize_false,
- materialize_true));
+ VISIT_FOR_CONTROL(expr->expression(),
+ materialize_false,
+ materialize_true);
+ materialize_false->SetJoinId(expr->expression()->id());
+ materialize_true->SetJoinId(expr->expression()->id());
- if (materialize_false->HasPredecessor()) {
- materialize_false->SetJoinId(expr->expression()->id());
- set_current_block(materialize_false);
- Push(graph()->GetConstantFalse());
- } else {
- materialize_false = NULL;
- }
-
- if (materialize_true->HasPredecessor()) {
- materialize_true->SetJoinId(expr->expression()->id());
- set_current_block(materialize_true);
- Push(graph()->GetConstantTrue());
- } else {
- materialize_true = NULL;
- }
+ set_current_block(materialize_false);
+ Push(graph()->GetConstantFalse());
+ set_current_block(materialize_true);
+ Push(graph()->GetConstantTrue());
HBasicBlock* join =
CreateJoin(materialize_false, materialize_true, expr->id());
set_current_block(join);
- if (join != NULL) ast_context()->ReturnValue(Pop());
+ ast_context()->ReturnValue(Pop());
} else {
ASSERT(ast_context()->IsEffect());
VisitForEffect(expr->expression());
}
} else if (op == Token::TYPEOF) {
- CHECK_ALIVE(VisitForTypeOf(expr->expression()));
+ VisitForTypeOf(expr->expression());
+ if (HasStackOverflow()) return;
HValue* value = Pop();
ast_context()->ReturnInstruction(new(zone()) HTypeof(value), expr->id());
} else {
- CHECK_ALIVE(VisitForValue(expr->expression()));
+ VISIT_FOR_VALUE(expr->expression());
HValue* value = Pop();
HInstruction* instr = NULL;
switch (op) {
@@ -4724,7 +4625,7 @@
instr = new(zone()) HMul(value, graph_->GetConstant1());
break;
default:
- return Bailout("Value: unsupported unary operation");
+ BAILOUT("Value: unsupported unary operation");
break;
}
ast_context()->ReturnInstruction(instr, expr->id());
@@ -4743,9 +4644,6 @@
void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
Expression* target = expr->expression();
VariableProxy* proxy = target->AsVariableProxy();
Variable* var = proxy->AsVariable();
@@ -4754,7 +4652,7 @@
bool inc = expr->op() == Token::INC;
if (var != NULL) {
- CHECK_ALIVE(VisitForValue(target));
+ VISIT_FOR_VALUE(target);
// Match the full code generator stack by simulating an extra stack
// element for postfix operations in a non-effect context.
@@ -4779,7 +4677,7 @@
AddInstruction(instr);
if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
} else {
- return Bailout("lookup variable in count operation");
+ BAILOUT("lookup variable in count operation");
}
Drop(has_extra ? 2 : 1);
ast_context()->ReturnValue(expr->is_postfix() ? before : after);
@@ -4795,7 +4693,7 @@
bool has_extra = expr->is_postfix() && !ast_context()->IsEffect();
if (has_extra) Push(graph_->GetConstantUndefined());
- CHECK_ALIVE(VisitForValue(prop->obj()));
+ VISIT_FOR_VALUE(prop->obj());
HValue* obj = Top();
HInstruction* load = NULL;
@@ -4836,8 +4734,8 @@
bool has_extra = expr->is_postfix() && !ast_context()->IsEffect();
if (has_extra) Push(graph_->GetConstantUndefined());
- CHECK_ALIVE(VisitForValue(prop->obj()));
- CHECK_ALIVE(VisitForValue(prop->key()));
+ VISIT_FOR_VALUE(prop->obj());
+ VISIT_FOR_VALUE(prop->key());
HValue* obj = environment()->ExpressionStackAt(1);
HValue* key = environment()->ExpressionStackAt(0);
@@ -4868,7 +4766,7 @@
}
} else {
- return Bailout("invalid lhs in count operation");
+ BAILOUT("invalid lhs in count operation");
}
}
@@ -4888,21 +4786,10 @@
HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
HValue* left,
HValue* right) {
- TypeInfo info = oracle()->BinaryType(expr);
HInstruction* instr = NULL;
switch (expr->op()) {
case Token::ADD:
- if (info.IsString()) {
- AddInstruction(new(zone()) HCheckNonSmi(left));
- AddInstruction(new(zone()) HCheckInstanceType(
- left, FIRST_STRING_TYPE, LAST_STRING_TYPE));
- AddInstruction(new(zone()) HCheckNonSmi(right));
- AddInstruction(new(zone()) HCheckInstanceType(
- right, FIRST_STRING_TYPE, LAST_STRING_TYPE));
- instr = new(zone()) HStringAdd(left, right);
- } else {
- instr = new(zone()) HAdd(left, right);
- }
+ instr = new(zone()) HAdd(left, right);
break;
case Token::SUB:
instr = new(zone()) HSub(left, right);
@@ -4937,6 +4824,7 @@
default:
UNREACHABLE();
}
+ TypeInfo info = oracle()->BinaryType(expr);
// If we hit an uninitialized binary op stub we will get type info
// for a smi operation. If one of the operands is a constant string
// do not generate code assuming it is a smi operation.
@@ -4973,11 +4861,8 @@
void HGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
if (expr->op() == Token::COMMA) {
- CHECK_ALIVE(VisitForEffect(expr->left()));
+ VISIT_FOR_EFFECT(expr->left());
// Visit the right subexpression in the same AST context as the entire
// expression.
Visit(expr->right());
@@ -4989,25 +4874,19 @@
// Translate left subexpression.
HBasicBlock* eval_right = graph()->CreateBasicBlock();
if (is_logical_and) {
- CHECK_BAILOUT(VisitForControl(expr->left(),
- eval_right,
- context->if_false()));
+ VISIT_FOR_CONTROL(expr->left(), eval_right, context->if_false());
} else {
- CHECK_BAILOUT(VisitForControl(expr->left(),
- context->if_true(),
- eval_right));
+ VISIT_FOR_CONTROL(expr->left(), context->if_true(), eval_right);
}
+ eval_right->SetJoinId(expr->RightId());
// Translate right subexpression by visiting it in the same AST
// context as the entire expression.
- if (eval_right->HasPredecessor()) {
- eval_right->SetJoinId(expr->RightId());
- set_current_block(eval_right);
- Visit(expr->right());
- }
+ set_current_block(eval_right);
+ Visit(expr->right());
} else if (ast_context()->IsValue()) {
- CHECK_ALIVE(VisitForValue(expr->left()));
+ VISIT_FOR_VALUE(expr->left());
ASSERT(current_block() != NULL);
// We need an extra block to maintain edge-split form.
@@ -5020,7 +4899,7 @@
set_current_block(eval_right);
Drop(1); // Value of the left subexpression.
- CHECK_BAILOUT(VisitForValue(expr->right()));
+ VISIT_FOR_VALUE(expr->right());
HBasicBlock* join_block =
CreateJoin(empty_block, current_block(), expr->id());
@@ -5034,42 +4913,33 @@
// extra block to maintain edge-split form.
HBasicBlock* empty_block = graph()->CreateBasicBlock();
HBasicBlock* right_block = graph()->CreateBasicBlock();
+ HBasicBlock* join_block = graph()->CreateBasicBlock();
if (is_logical_and) {
- CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
+ VISIT_FOR_CONTROL(expr->left(), right_block, empty_block);
} else {
- CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
+ VISIT_FOR_CONTROL(expr->left(), empty_block, right_block);
}
-
// TODO(kmillikin): Find a way to fix this. It's ugly that there are
// actually two empty blocks (one here and one inserted by
// TestContext::BuildBranch, and that they both have an HSimulate
// though the second one is not a merge node, and that we really have
// no good AST ID to put on that first HSimulate.
- if (empty_block->HasPredecessor()) {
- empty_block->SetJoinId(expr->id());
- } else {
- empty_block = NULL;
- }
+ empty_block->SetJoinId(expr->id());
+ right_block->SetJoinId(expr->RightId());
+ set_current_block(right_block);
+ VISIT_FOR_EFFECT(expr->right());
- if (right_block->HasPredecessor()) {
- right_block->SetJoinId(expr->RightId());
- set_current_block(right_block);
- CHECK_BAILOUT(VisitForEffect(expr->right()));
- right_block = current_block();
- } else {
- right_block = NULL;
- }
-
- HBasicBlock* join_block =
- CreateJoin(empty_block, right_block, expr->id());
+ empty_block->Goto(join_block);
+ current_block()->Goto(join_block);
+ join_block->SetJoinId(expr->id());
set_current_block(join_block);
// We did not materialize any value in the predecessor environments,
// so there is no need to handle it here.
}
} else {
- CHECK_ALIVE(VisitForValue(expr->left()));
- CHECK_ALIVE(VisitForValue(expr->right()));
+ VISIT_FOR_VALUE(expr->left());
+ VISIT_FOR_VALUE(expr->right());
HValue* right = Pop();
HValue* left = Pop();
@@ -5108,12 +4978,9 @@
void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
if (IsClassOfTest(expr)) {
CallRuntime* call = expr->left()->AsCallRuntime();
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* value = Pop();
Literal* literal = expr->right()->AsLiteral();
Handle<String> rhs = Handle<String>::cast(literal->handle());
@@ -5129,7 +4996,8 @@
if ((expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT) &&
left_unary != NULL && left_unary->op() == Token::TYPEOF &&
right_literal != NULL && right_literal->handle()->IsString()) {
- CHECK_ALIVE(VisitForTypeOf(left_unary->expression()));
+ VisitForTypeOf(left_unary->expression());
+ if (HasStackOverflow()) return;
HValue* left = Pop();
HInstruction* instr = new(zone()) HTypeofIs(left,
Handle<String>::cast(right_literal->handle()));
@@ -5138,8 +5006,8 @@
return;
}
- CHECK_ALIVE(VisitForValue(expr->left()));
- CHECK_ALIVE(VisitForValue(expr->right()));
+ VISIT_FOR_VALUE(expr->left());
+ VISIT_FOR_VALUE(expr->right());
HValue* right = Pop();
HValue* left = Pop();
@@ -5184,7 +5052,7 @@
instr = new(zone()) HInstanceOfKnownGlobal(left, target);
}
} else if (op == Token::IN) {
- return Bailout("Unsupported comparison: in");
+ BAILOUT("Unsupported comparison: in");
} else if (type_info.IsNonPrimitive()) {
switch (op) {
case Token::EQ:
@@ -5197,7 +5065,7 @@
break;
}
default:
- return Bailout("Unsupported non-primitive compare");
+ BAILOUT("Unsupported non-primitive compare");
break;
}
} else {
@@ -5212,10 +5080,7 @@
void HGraphBuilder::VisitCompareToNull(CompareToNull* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
- CHECK_ALIVE(VisitForValue(expr->expression()));
+ VISIT_FOR_VALUE(expr->expression());
HValue* value = Pop();
HIsNull* compare = new(zone()) HIsNull(value, expr->is_strict());
@@ -5224,10 +5089,7 @@
void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
- ASSERT(!HasStackOverflow());
- ASSERT(current_block() != NULL);
- ASSERT(current_block()->HasPredecessor());
- return Bailout("ThisFunction");
+ BAILOUT("ThisFunction");
}
@@ -5242,7 +5104,7 @@
(slot != NULL && slot->type() == Slot::LOOKUP) ||
decl->mode() == Variable::CONST ||
decl->fun() != NULL) {
- return Bailout("unsupported declaration");
+ BAILOUT("unsupported declaration");
}
}
@@ -5251,7 +5113,7 @@
// Support for types.
void HGraphBuilder::GenerateIsSmi(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* value = Pop();
HIsSmi* result = new(zone()) HIsSmi(value);
ast_context()->ReturnInstruction(result, call->id());
@@ -5260,7 +5122,7 @@
void HGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* value = Pop();
HHasInstanceType* result =
new(zone()) HHasInstanceType(value, FIRST_JS_OBJECT_TYPE, LAST_TYPE);
@@ -5270,7 +5132,7 @@
void HGraphBuilder::GenerateIsFunction(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* value = Pop();
HHasInstanceType* result =
new(zone()) HHasInstanceType(value, JS_FUNCTION_TYPE);
@@ -5280,7 +5142,7 @@
void HGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* value = Pop();
HHasCachedArrayIndex* result = new(zone()) HHasCachedArrayIndex(value);
ast_context()->ReturnInstruction(result, call->id());
@@ -5289,7 +5151,7 @@
void HGraphBuilder::GenerateIsArray(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* value = Pop();
HHasInstanceType* result = new(zone()) HHasInstanceType(value, JS_ARRAY_TYPE);
ast_context()->ReturnInstruction(result, call->id());
@@ -5298,7 +5160,7 @@
void HGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* value = Pop();
HHasInstanceType* result =
new(zone()) HHasInstanceType(value, JS_REGEXP_TYPE);
@@ -5308,7 +5170,7 @@
void HGraphBuilder::GenerateIsObject(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* value = Pop();
HIsObject* test = new(zone()) HIsObject(value);
ast_context()->ReturnInstruction(test, call->id());
@@ -5316,19 +5178,18 @@
void HGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
- return Bailout("inlined runtime function: IsNonNegativeSmi");
+ BAILOUT("inlined runtime function: IsNonNegativeSmi");
}
void HGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
- return Bailout("inlined runtime function: IsUndetectableObject");
+ BAILOUT("inlined runtime function: IsUndetectableObject");
}
void HGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
CallRuntime* call) {
- return Bailout(
- "inlined runtime function: IsStringWrapperSafeForDefaultValueOf");
+ BAILOUT("inlined runtime function: IsStringWrapperSafeForDefaultValueOf");
}
@@ -5357,7 +5218,7 @@
void HGraphBuilder::GenerateArguments(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* index = Pop();
HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
@@ -5371,13 +5232,13 @@
void HGraphBuilder::GenerateClassOf(CallRuntime* call) {
// The special form detected by IsClassOfTest is detected before we get here
// and does not cause a bailout.
- return Bailout("inlined runtime function: ClassOf");
+ BAILOUT("inlined runtime function: ClassOf");
}
void HGraphBuilder::GenerateValueOf(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* value = Pop();
HValueOf* result = new(zone()) HValueOf(value);
ast_context()->ReturnInstruction(result, call->id());
@@ -5385,15 +5246,15 @@
void HGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
- return Bailout("inlined runtime function: SetValueOf");
+ BAILOUT("inlined runtime function: SetValueOf");
}
// Fast support for charCodeAt(n).
void HGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
ASSERT(call->arguments()->length() == 2);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
- CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
+ VISIT_FOR_VALUE(call->arguments()->at(1));
HValue* index = Pop();
HValue* string = Pop();
HStringCharCodeAt* result = BuildStringCharCodeAt(string, index);
@@ -5404,7 +5265,7 @@
// Fast support for string.charAt(n) and string[n].
void HGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* char_code = Pop();
HStringCharFromCode* result = new(zone()) HStringCharFromCode(char_code);
ast_context()->ReturnInstruction(result, call->id());
@@ -5414,8 +5275,8 @@
// Fast support for string.charAt(n) and string[n].
void HGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
ASSERT(call->arguments()->length() == 2);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
- CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
+ VISIT_FOR_VALUE(call->arguments()->at(1));
HValue* index = Pop();
HValue* string = Pop();
HStringCharCodeAt* char_code = BuildStringCharCodeAt(string, index);
@@ -5428,8 +5289,8 @@
// Fast support for object equality testing.
void HGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
ASSERT(call->arguments()->length() == 2);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
- CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
+ VISIT_FOR_VALUE(call->arguments()->at(1));
HValue* right = Pop();
HValue* left = Pop();
HCompareJSObjectEq* result = new(zone()) HCompareJSObjectEq(left, right);
@@ -5445,14 +5306,15 @@
// Fast support for Math.random().
void HGraphBuilder::GenerateRandomHeapNumber(CallRuntime* call) {
- return Bailout("inlined runtime function: RandomHeapNumber");
+ BAILOUT("inlined runtime function: RandomHeapNumber");
}
// Fast support for StringAdd.
void HGraphBuilder::GenerateStringAdd(CallRuntime* call) {
ASSERT_EQ(2, call->arguments()->length());
- CHECK_ALIVE(VisitArgumentList(call->arguments()));
+ VisitArgumentList(call->arguments());
+ CHECK_BAILOUT;
HContext* context = new(zone()) HContext;
AddInstruction(context);
HCallStub* result = new(zone()) HCallStub(context, CodeStub::StringAdd, 2);
@@ -5464,7 +5326,8 @@
// Fast support for SubString.
void HGraphBuilder::GenerateSubString(CallRuntime* call) {
ASSERT_EQ(3, call->arguments()->length());
- CHECK_ALIVE(VisitArgumentList(call->arguments()));
+ VisitArgumentList(call->arguments());
+ CHECK_BAILOUT;
HContext* context = new(zone()) HContext;
AddInstruction(context);
HCallStub* result = new(zone()) HCallStub(context, CodeStub::SubString, 3);
@@ -5476,7 +5339,8 @@
// Fast support for StringCompare.
void HGraphBuilder::GenerateStringCompare(CallRuntime* call) {
ASSERT_EQ(2, call->arguments()->length());
- CHECK_ALIVE(VisitArgumentList(call->arguments()));
+ VisitArgumentList(call->arguments());
+ CHECK_BAILOUT;
HContext* context = new(zone()) HContext;
AddInstruction(context);
HCallStub* result =
@@ -5489,7 +5353,8 @@
// Support for direct calls from JavaScript to native RegExp code.
void HGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
ASSERT_EQ(4, call->arguments()->length());
- CHECK_ALIVE(VisitArgumentList(call->arguments()));
+ VisitArgumentList(call->arguments());
+ CHECK_BAILOUT;
HContext* context = new(zone()) HContext;
AddInstruction(context);
HCallStub* result = new(zone()) HCallStub(context, CodeStub::RegExpExec, 4);
@@ -5501,7 +5366,8 @@
// Construct a RegExp exec result with two in-object properties.
void HGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
ASSERT_EQ(3, call->arguments()->length());
- CHECK_ALIVE(VisitArgumentList(call->arguments()));
+ VisitArgumentList(call->arguments());
+ CHECK_BAILOUT;
HContext* context = new(zone()) HContext;
AddInstruction(context);
HCallStub* result =
@@ -5513,14 +5379,15 @@
// Support for fast native caches.
void HGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
- return Bailout("inlined runtime function: GetFromCache");
+ BAILOUT("inlined runtime function: GetFromCache");
}
// Fast support for number to string.
void HGraphBuilder::GenerateNumberToString(CallRuntime* call) {
ASSERT_EQ(1, call->arguments()->length());
- CHECK_ALIVE(VisitArgumentList(call->arguments()));
+ VisitArgumentList(call->arguments());
+ CHECK_BAILOUT;
HContext* context = new(zone()) HContext;
AddInstruction(context);
HCallStub* result =
@@ -5534,35 +5401,21 @@
// indices. This should only be used if the indices are known to be
// non-negative and within bounds of the elements array at the call site.
void HGraphBuilder::GenerateSwapElements(CallRuntime* call) {
- return Bailout("inlined runtime function: SwapElements");
+ BAILOUT("inlined runtime function: SwapElements");
}
// Fast call for custom callbacks.
void HGraphBuilder::GenerateCallFunction(CallRuntime* call) {
- // 1 ~ The function to call is not itself an argument to the call.
- int arg_count = call->arguments()->length() - 1;
- ASSERT(arg_count >= 1); // There's always at least a receiver.
-
- for (int i = 0; i < arg_count; ++i) {
- CHECK_ALIVE(VisitArgument(call->arguments()->at(i)));
- }
- CHECK_ALIVE(VisitForValue(call->arguments()->last()));
- HValue* function = Pop();
- HContext* context = new HContext;
- AddInstruction(context);
- HInvokeFunction* result =
- new(zone()) HInvokeFunction(context, function, arg_count);
- Drop(arg_count);
- ast_context()->ReturnInstruction(result, call->id());
+ BAILOUT("inlined runtime function: CallFunction");
}
// Fast call to math functions.
void HGraphBuilder::GenerateMathPow(CallRuntime* call) {
ASSERT_EQ(2, call->arguments()->length());
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
- CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
+ VISIT_FOR_VALUE(call->arguments()->at(1));
HValue* right = Pop();
HValue* left = Pop();
HPower* result = new(zone()) HPower(left, right);
@@ -5572,7 +5425,8 @@
void HGraphBuilder::GenerateMathSin(CallRuntime* call) {
ASSERT_EQ(1, call->arguments()->length());
- CHECK_ALIVE(VisitArgumentList(call->arguments()));
+ VisitArgumentList(call->arguments());
+ CHECK_BAILOUT;
HContext* context = new(zone()) HContext;
AddInstruction(context);
HCallStub* result =
@@ -5585,7 +5439,8 @@
void HGraphBuilder::GenerateMathCos(CallRuntime* call) {
ASSERT_EQ(1, call->arguments()->length());
- CHECK_ALIVE(VisitArgumentList(call->arguments()));
+ VisitArgumentList(call->arguments());
+ CHECK_BAILOUT;
HContext* context = new(zone()) HContext;
AddInstruction(context);
HCallStub* result =
@@ -5598,7 +5453,8 @@
void HGraphBuilder::GenerateMathLog(CallRuntime* call) {
ASSERT_EQ(1, call->arguments()->length());
- CHECK_ALIVE(VisitArgumentList(call->arguments()));
+ VisitArgumentList(call->arguments());
+ CHECK_BAILOUT;
HContext* context = new(zone()) HContext;
AddInstruction(context);
HCallStub* result =
@@ -5610,19 +5466,19 @@
void HGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
- return Bailout("inlined runtime function: MathSqrt");
+ BAILOUT("inlined runtime function: MathSqrt");
}
// Check whether two RegExps are equivalent
void HGraphBuilder::GenerateIsRegExpEquivalent(CallRuntime* call) {
- return Bailout("inlined runtime function: IsRegExpEquivalent");
+ BAILOUT("inlined runtime function: IsRegExpEquivalent");
}
void HGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
- CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ VISIT_FOR_VALUE(call->arguments()->at(0));
HValue* value = Pop();
HGetCachedArrayIndex* result = new(zone()) HGetCachedArrayIndex(value);
ast_context()->ReturnInstruction(result, call->id());
@@ -5630,12 +5486,15 @@
void HGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
- return Bailout("inlined runtime function: FastAsciiArrayJoin");
+ BAILOUT("inlined runtime function: FastAsciiArrayJoin");
}
+#undef BAILOUT
#undef CHECK_BAILOUT
-#undef CHECK_ALIVE
+#undef VISIT_FOR_EFFECT
+#undef VISIT_FOR_VALUE
+#undef ADD_TO_SUBGRAPH
HEnvironment::HEnvironment(HEnvironment* outer,
diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h
index 079dca7..46fda3b 100644
--- a/src/ia32/assembler-ia32.h
+++ b/src/ia32/assembler-ia32.h
@@ -296,6 +296,7 @@
RelocInfo::Mode rmode_;
friend class Assembler;
+ friend class MacroAssembler;
};
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 275e8e2..5d32095 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -446,9 +446,6 @@
case TRBinaryOpIC::ODDBALL:
GenerateOddballStub(masm);
break;
- case TRBinaryOpIC::BOTH_STRING:
- GenerateBothStringStub(masm);
- break;
case TRBinaryOpIC::STRING:
GenerateStringStub(masm);
break;
@@ -912,38 +909,6 @@
}
-void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
- Label call_runtime;
- ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING);
- ASSERT(op_ == Token::ADD);
- // If both arguments are strings, call the string add stub.
- // Otherwise, do a transition.
-
- // Registers containing left and right operands respectively.
- Register left = edx;
- Register right = eax;
-
- // Test if left operand is a string.
- __ test(left, Immediate(kSmiTagMask));
- __ j(zero, &call_runtime);
- __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
- __ j(above_equal, &call_runtime);
-
- // Test if right operand is a string.
- __ test(right, Immediate(kSmiTagMask));
- __ j(zero, &call_runtime);
- __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
- __ j(above_equal, &call_runtime);
-
- StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
- GenerateRegisterArgsPush(masm);
- __ TailCallStub(&string_add_stub);
-
- __ bind(&call_runtime);
- GenerateTypeTransition(masm);
-}
-
-
void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
Label call_runtime;
ASSERT(operands_type_ == TRBinaryOpIC::INT32);
@@ -1155,25 +1120,23 @@
GenerateAddStrings(masm);
}
- Factory* factory = masm->isolate()->factory();
-
// Convert odd ball arguments to numbers.
NearLabel check, done;
- __ cmp(edx, factory->undefined_value());
+ __ cmp(edx, FACTORY->undefined_value());
__ j(not_equal, &check);
if (Token::IsBitOp(op_)) {
__ xor_(edx, Operand(edx));
} else {
- __ mov(edx, Immediate(factory->nan_value()));
+ __ mov(edx, Immediate(FACTORY->nan_value()));
}
__ jmp(&done);
__ bind(&check);
- __ cmp(eax, factory->undefined_value());
+ __ cmp(eax, FACTORY->undefined_value());
__ j(not_equal, &done);
if (Token::IsBitOp(op_)) {
__ xor_(eax, Operand(eax));
} else {
- __ mov(eax, Immediate(factory->nan_value()));
+ __ mov(eax, Immediate(FACTORY->nan_value()));
}
__ bind(&done);
@@ -4084,7 +4047,12 @@
__ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
__ j(not_equal, ¬_outermost_js);
__ mov(Operand::StaticVariable(js_entry_sp), ebp);
+ __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
+ Label cont;
+ __ jmp(&cont);
__ bind(¬_outermost_js);
+ __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
+ __ bind(&cont);
#endif
// Call a faked try-block that does the invoke.
@@ -4130,23 +4098,20 @@
__ call(Operand(edx));
// Unlink this frame from the handler chain.
- __ pop(Operand::StaticVariable(ExternalReference(
- Isolate::k_handler_address,
- masm->isolate())));
- // Pop next_sp.
- __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
+ __ PopTryHandler();
+ __ bind(&exit);
#ifdef ENABLE_LOGGING_AND_PROFILING
- // If current EBP value is the same as js_entry_sp value, it means that
- // the current function is the outermost.
- __ cmp(ebp, Operand::StaticVariable(js_entry_sp));
+ // Check if the current stack frame is marked as the outermost JS frame.
+ __ pop(ebx);
+ __ cmp(Operand(ebx),
+ Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
__ j(not_equal, ¬_outermost_js_2);
__ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
__ bind(¬_outermost_js_2);
#endif
// Restore the top frame descriptor from the stack.
- __ bind(&exit);
__ pop(Operand::StaticVariable(ExternalReference(
Isolate::k_c_entry_fp_address,
masm->isolate())));
diff --git a/src/ia32/code-stubs-ia32.h b/src/ia32/code-stubs-ia32.h
index cf73682..80a75cd 100644
--- a/src/ia32/code-stubs-ia32.h
+++ b/src/ia32/code-stubs-ia32.h
@@ -153,7 +153,6 @@
void GenerateHeapNumberStub(MacroAssembler* masm);
void GenerateOddballStub(MacroAssembler* masm);
void GenerateStringStub(MacroAssembler* masm);
- void GenerateBothStringStub(MacroAssembler* masm);
void GenerateGenericStub(MacroAssembler* masm);
void GenerateAddStrings(MacroAssembler* masm);
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 69d5e77..5d153a8 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -192,7 +192,7 @@
__ lea(edx,
Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
__ push(edx);
- __ push(Immediate(Smi::FromInt(scope()->num_parameters())));
+ __ SafePush(Immediate(Smi::FromInt(scope()->num_parameters())));
// Arguments to ArgumentsAccessStub:
// function, receiver address, parameter count.
// The stub will rewrite receiver and parameter count if the previous
@@ -381,13 +381,20 @@
void FullCodeGenerator::AccumulatorValueContext::Plug(
Handle<Object> lit) const {
- __ Set(result_register(), Immediate(lit));
+ if (lit->IsSmi()) {
+ __ SafeSet(result_register(), Immediate(lit));
+ } else {
+ __ Set(result_register(), Immediate(lit));
+ }
}
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
- // Immediates can be pushed directly.
- __ push(Immediate(lit));
+ if (lit->IsSmi()) {
+ __ SafePush(Immediate(lit));
+ } else {
+ __ push(Immediate(lit));
+ }
}
@@ -730,7 +737,7 @@
}
ASSERT(prop->key()->AsLiteral() != NULL &&
prop->key()->AsLiteral()->handle()->IsSmi());
- __ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));
+ __ SafeSet(ecx, Immediate(prop->key()->AsLiteral()->handle()));
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
@@ -1184,7 +1191,7 @@
__ mov(edx,
ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
slow));
- __ mov(eax, Immediate(key_literal->handle()));
+ __ SafeSet(eax, Immediate(key_literal->handle()));
Handle<Code> ic =
isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1269,7 +1276,7 @@
ASSERT(key_literal->handle()->IsSmi());
// Load the key.
- __ mov(eax, Immediate(key_literal->handle()));
+ __ SafeSet(eax, Immediate(key_literal->handle()));
// Do a keyed property load.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
@@ -1540,7 +1547,7 @@
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
__ push(slot_operand);
- __ mov(eax, Immediate(property->key()->AsLiteral()->handle()));
+ __ SafeSet(eax, Immediate(property->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(property->obj());
VisitForAccumulatorValue(property->key());
@@ -1553,7 +1560,7 @@
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
__ push(slot_operand);
- __ push(Immediate(property->key()->AsLiteral()->handle()));
+ __ SafePush(Immediate(property->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
@@ -1632,6 +1639,7 @@
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Literal* key = prop->key()->AsLiteral();
+ ASSERT(!key->handle()->IsSmi());
__ mov(ecx, Immediate(key->handle()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1796,7 +1804,7 @@
EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
}
__ mov(edx, eax);
- __ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));
+ __ SafeSet(ecx, Immediate(prop->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
@@ -2307,7 +2315,7 @@
SetSourcePosition(expr->position());
// Load function and argument count into edi and eax.
- __ Set(eax, Immediate(arg_count));
+ __ SafeSet(eax, Immediate(arg_count));
__ mov(edi, Operand(esp, arg_count * kPointerSize));
Handle<Code> construct_builtin =
@@ -2647,7 +2655,7 @@
// parameter count in eax.
VisitForAccumulatorValue(args->at(0));
__ mov(edx, eax);
- __ mov(eax, Immediate(Smi::FromInt(scope()->num_parameters())));
+ __ SafeSet(eax, Immediate(Smi::FromInt(scope()->num_parameters())));
ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
__ CallStub(&stub);
context()->Plug(eax);
@@ -2659,7 +2667,7 @@
Label exit;
// Get the number of formal parameters.
- __ Set(eax, Immediate(Smi::FromInt(scope()->num_parameters())));
+ __ SafeSet(eax, Immediate(Smi::FromInt(scope()->num_parameters())));
// Check if the calling frame is an arguments adaptor frame.
__ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
@@ -3087,14 +3095,15 @@
void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
ASSERT(args->length() >= 2);
- int arg_count = args->length() - 2; // 2 ~ receiver and function.
- for (int i = 0; i < arg_count + 1; ++i) {
- VisitForStackValue(args->at(i));
+ int arg_count = args->length() - 2; // For receiver and function.
+ VisitForStackValue(args->at(0)); // Receiver.
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i + 1));
}
- VisitForAccumulatorValue(args->last()); // Function.
+ VisitForAccumulatorValue(args->at(arg_count + 1)); // Function.
- // InvokeFunction requires the function in edi. Move it in there.
- __ mov(edi, result_register());
+ // InvokeFunction requires function in edi. Move it in there.
+ if (!result_register().is(edi)) __ mov(edi, result_register());
ParameterCount count(arg_count);
__ InvokeFunction(edi, count, CALL_FUNCTION);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -3818,7 +3827,7 @@
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
__ push(slot_operand);
- __ mov(eax, Immediate(prop->key()->AsLiteral()->handle()));
+ __ SafeSet(eax, Immediate(prop->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
@@ -4253,7 +4262,30 @@
default:
break;
}
+
__ call(ic, mode);
+
+ // Crankshaft doesn't need patching of inlined loads and stores.
+ // When compiling the snapshot we need to produce code that works
+ // with and without Crankshaft.
+ if (V8::UseCrankshaft() && !Serializer::enabled()) {
+ return;
+ }
+
+ // If we're calling a (keyed) load or store stub, we have to mark
+ // the call as containing no inlined code so we will not attempt to
+ // patch it.
+ switch (ic->kind()) {
+ case Code::LOAD_IC:
+ case Code::KEYED_LOAD_IC:
+ case Code::STORE_IC:
+ case Code::KEYED_STORE_IC:
+ __ nop(); // Signals no inlined code.
+ break;
+ default:
+ // Do nothing.
+ break;
+ }
}
@@ -4274,6 +4306,7 @@
default:
break;
}
+
__ call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index 4106f01..b7af03c 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -371,6 +371,12 @@
}
+// The offset from the inlined patch site to the start of the
+// inlined load instruction. It is 7 bytes (test eax, imm) plus
+// 6 bytes (jne slow_label).
+const int LoadIC::kOffsetToLoadInstruction = 13;
+
+
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : receiver
@@ -1267,6 +1273,172 @@
}
+bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
+ if (V8::UseCrankshaft()) return false;
+
+ // The address of the instruction following the call.
+ Address test_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+ // If the instruction following the call is not a test eax, nothing
+ // was inlined.
+ if (*test_instruction_address != Assembler::kTestEaxByte) return false;
+
+ Address delta_address = test_instruction_address + 1;
+ // The delta to the start of the map check instruction.
+ int delta = *reinterpret_cast<int*>(delta_address);
+
+ // The map address is the last 4 bytes of the 7-byte
+ // operand-immediate compare instruction, so we add 3 to get the
+ // offset to the last 4 bytes.
+ Address map_address = test_instruction_address + delta + 3;
+ *(reinterpret_cast<Object**>(map_address)) = map;
+
+ // The offset is in the last 4 bytes of a six byte
+ // memory-to-register move instruction, so we add 2 to get the
+ // offset to the last 4 bytes.
+ Address offset_address =
+ test_instruction_address + delta + kOffsetToLoadInstruction + 2;
+ *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
+ return true;
+}
+
+
+// One byte opcode for mov ecx,0xXXXXXXXX.
+// Marks inlined contextual loads using all kinds of cells. Generated
+// code has the hole check:
+// mov reg, <cell>
+// mov reg, (<cell>, value offset)
+// cmp reg, <the hole>
+// je slow
+// ;; use reg
+static const byte kMovEcxByte = 0xB9;
+
+// One byte opcode for mov edx,0xXXXXXXXX.
+// Marks inlined contextual loads using only "don't delete"
+// cells. Generated code doesn't have the hole check:
+// mov reg, <cell>
+// mov reg, (<cell>, value offset)
+// ;; use reg
+static const byte kMovEdxByte = 0xBA;
+
+bool LoadIC::PatchInlinedContextualLoad(Address address,
+ Object* map,
+ Object* cell,
+ bool is_dont_delete) {
+ if (V8::UseCrankshaft()) return false;
+
+ // The address of the instruction following the call.
+ Address mov_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+ // If the instruction following the call is not a mov ecx/edx,
+ // nothing was inlined.
+ byte b = *mov_instruction_address;
+ if (b != kMovEcxByte && b != kMovEdxByte) return false;
+ // If we don't have the hole check generated, we can only support
+ // "don't delete" cells.
+ if (b == kMovEdxByte && !is_dont_delete) return false;
+
+ Address delta_address = mov_instruction_address + 1;
+ // The delta to the start of the map check instruction.
+ int delta = *reinterpret_cast<int*>(delta_address);
+
+ // The map address is the last 4 bytes of the 7-byte
+ // operand-immediate compare instruction, so we add 3 to get the
+ // offset to the last 4 bytes.
+ Address map_address = mov_instruction_address + delta + 3;
+ *(reinterpret_cast<Object**>(map_address)) = map;
+
+ // The cell is in the last 4 bytes of a five byte mov reg, imm32
+ // instruction, so we add 1 to get the offset to the last 4 bytes.
+ Address offset_address =
+ mov_instruction_address + delta + kOffsetToLoadInstruction + 1;
+ *reinterpret_cast<Object**>(offset_address) = cell;
+ return true;
+}
+
+
+bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
+ if (V8::UseCrankshaft()) return false;
+
+ // The address of the instruction following the call.
+ Address test_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+
+ // If the instruction following the call is not a test eax, nothing
+ // was inlined.
+ if (*test_instruction_address != Assembler::kTestEaxByte) return false;
+
+ // Extract the encoded deltas from the test eax instruction.
+ Address encoded_offsets_address = test_instruction_address + 1;
+ int encoded_offsets = *reinterpret_cast<int*>(encoded_offsets_address);
+ int delta_to_map_check = -(encoded_offsets & 0xFFFF);
+ int delta_to_record_write = encoded_offsets >> 16;
+
+ // Patch the map to check. The map address is the last 4 bytes of
+ // the 7-byte operand-immediate compare instruction.
+ Address map_check_address = test_instruction_address + delta_to_map_check;
+ Address map_address = map_check_address + 3;
+ *(reinterpret_cast<Object**>(map_address)) = map;
+
+ // Patch the offset in the store instruction. The offset is in the
+ // last 4 bytes of a six byte register-to-memory move instruction.
+ Address offset_address =
+ map_check_address + StoreIC::kOffsetToStoreInstruction + 2;
+ // The offset should have initial value (kMaxInt - 1), cleared value
+ // (-1) or we should be clearing the inlined version.
+ ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt - 1 ||
+ *reinterpret_cast<int*>(offset_address) == -1 ||
+ (offset == 0 && map == HEAP->null_value()));
+ *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
+
+ // Patch the offset in the write-barrier code. The offset is the
+ // last 4 bytes of a six byte lea instruction.
+ offset_address = map_check_address + delta_to_record_write + 2;
+ // The offset should have initial value (kMaxInt), cleared value
+ // (-1) or we should be clearing the inlined version.
+ ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt ||
+ *reinterpret_cast<int*>(offset_address) == -1 ||
+ (offset == 0 && map == HEAP->null_value()));
+ *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
+
+ return true;
+}
+
+
+static bool PatchInlinedMapCheck(Address address, Object* map) {
+ if (V8::UseCrankshaft()) return false;
+
+ Address test_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+ // The keyed load has a fast inlined case if the IC call instruction
+ // is immediately followed by a test instruction.
+ if (*test_instruction_address != Assembler::kTestEaxByte) return false;
+
+ // Fetch the offset from the test instruction to the map cmp
+ // instruction. This offset is stored in the last 4 bytes of the 5
+ // byte test instruction.
+ Address delta_address = test_instruction_address + 1;
+ int delta = *reinterpret_cast<int*>(delta_address);
+ // Compute the map address. The map address is in the last 4 bytes
+ // of the 7-byte operand-immediate compare instruction, so we add 3
+ // to the offset to get the map address.
+ Address map_address = test_instruction_address + delta + 3;
+ // Patch the map check.
+ *(reinterpret_cast<Object**>(map_address)) = map;
+ return true;
+}
+
+
+bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
+ return PatchInlinedMapCheck(address, map);
+}
+
+
+bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
+ return PatchInlinedMapCheck(address, map);
+}
+
+
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : key
@@ -1347,6 +1519,12 @@
}
+// The offset from the inlined patch site to the start of the inlined
+// store instruction. It is 7 bytes (test reg, imm) plus 6 bytes (jne
+// slow_label).
+const int StoreIC::kOffsetToStoreInstruction = 13;
+
+
void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : value
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 46c71e8..0f96f78 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -77,7 +77,7 @@
void LCodeGen::FinishCode(Handle<Code> code) {
ASSERT(is_done());
- code->set_stack_slots(GetStackSlotCount());
+ code->set_stack_slots(StackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code);
Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
@@ -132,7 +132,7 @@
__ push(edi); // Callee's JS function.
// Reserve space for the stack slots needed by the code.
- int slots = GetStackSlotCount();
+ int slots = StackSlotCount();
if (slots > 0) {
if (FLAG_debug_code) {
__ mov(Operand(eax), Immediate(slots));
@@ -254,7 +254,7 @@
bool LCodeGen::GenerateSafepointTable() {
ASSERT(is_done());
- safepoints_.Emit(masm(), GetStackSlotCount());
+ safepoints_.Emit(masm(), StackSlotCount());
return !is_aborted();
}
@@ -386,7 +386,7 @@
translation->StoreDoubleStackSlot(op->index());
} else if (op->IsArgument()) {
ASSERT(is_tagged);
- int src_index = GetStackSlotCount() + op->index();
+ int src_index = StackSlotCount() + op->index();
translation->StoreStackSlot(src_index);
} else if (op->IsRegister()) {
Register reg = ToRegister(op);
@@ -2057,7 +2057,7 @@
}
__ mov(esp, ebp);
__ pop(ebp);
- __ Ret((GetParameterCount() + 1) * kPointerSize, ecx);
+ __ Ret((ParameterCount() + 1) * kPointerSize, ecx);
}
@@ -2493,7 +2493,7 @@
SafepointGenerator safepoint_generator(this,
pointers,
env->deoptimization_index());
- ParameterCount actual(eax);
+ v8::internal::ParameterCount actual(eax);
__ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
}
@@ -2707,16 +2707,25 @@
Register output_reg = ToRegister(instr->result());
XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
- Label below_half, done;
// xmm_scratch = 0.5
ExternalReference one_half = ExternalReference::address_of_one_half();
__ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
- __ ucomisd(xmm_scratch, input_reg);
- __ j(above, &below_half);
// input = input + 0.5
__ addsd(input_reg, xmm_scratch);
+ // We need to return -0 for the input range [-0.5, 0[, otherwise
+ // compute Math.floor(value + 0.5).
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ __ ucomisd(input_reg, xmm_scratch);
+ DeoptimizeIf(below_equal, instr->environment());
+ } else {
+ // If we don't need to bailout on -0, we check only bailout
+ // on negative inputs.
+ __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
+ __ ucomisd(input_reg, xmm_scratch);
+ DeoptimizeIf(below, instr->environment());
+ }
// Compute Math.floor(value + 0.5).
// Use truncating instruction (OK because input is positive).
@@ -2725,27 +2734,6 @@
// Overflow is signalled with minint.
__ cmp(output_reg, 0x80000000u);
DeoptimizeIf(equal, instr->environment());
- __ jmp(&done);
-
- __ bind(&below_half);
-
- // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if
- // we can ignore the difference between a result of -0 and +0.
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- // If the sign is positive, we return +0.
- __ movmskpd(output_reg, input_reg);
- __ test(output_reg, Immediate(1));
- DeoptimizeIf(not_zero, instr->environment());
- } else {
- // If the input is >= -0.5, we return +0.
- __ mov(output_reg, Immediate(0xBF000000));
- __ movd(xmm_scratch, Operand(output_reg));
- __ cvtss2sd(xmm_scratch, xmm_scratch);
- __ ucomisd(input_reg, xmm_scratch);
- DeoptimizeIf(below, instr->environment());
- }
- __ Set(output_reg, Immediate(0));
- __ bind(&done);
}
@@ -2905,21 +2893,6 @@
}
-void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
- ASSERT(ToRegister(instr->context()).is(esi));
- ASSERT(ToRegister(instr->function()).is(edi));
- ASSERT(instr->HasPointerMap());
- ASSERT(instr->HasDeoptimizationEnvironment());
- LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
- RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator generator(this, pointers, env->deoptimization_index());
- ParameterCount count(instr->arity());
- __ InvokeFunction(edi, count, CALL_FUNCTION, &generator);
-}
-
-
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->key()).is(ecx));
@@ -3323,22 +3296,6 @@
}
-void LCodeGen::DoStringAdd(LStringAdd* instr) {
- if (instr->left()->IsConstantOperand()) {
- __ push(ToImmediate(instr->left()));
- } else {
- __ push(ToOperand(instr->left()));
- }
- if (instr->right()->IsConstantOperand()) {
- __ push(ToImmediate(instr->right()));
- } else {
- __ push(ToOperand(instr->right()));
- }
- StringAddStub stub(NO_STRING_CHECK_IN_STUB);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
-}
-
-
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister() || input->IsStackSlot());
diff --git a/src/ia32/lithium-codegen-ia32.h b/src/ia32/lithium-codegen-ia32.h
index f8bbea3..6d42cd7 100644
--- a/src/ia32/lithium-codegen-ia32.h
+++ b/src/ia32/lithium-codegen-ia32.h
@@ -147,8 +147,8 @@
Register temporary,
Register temporary2);
- int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- int GetParameterCount() const { return scope()->num_parameters(); }
+ int StackSlotCount() const { return chunk()->spill_slot_count(); }
+ int ParameterCount() const { return scope()->num_parameters(); }
void Abort(const char* format, ...);
void Comment(const char* format, ...);
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index aa91a83..9ccd189 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -71,21 +71,22 @@
#ifdef DEBUG
void LInstruction::VerifyCall() {
- // Call instructions can use only fixed registers as temporaries and
- // outputs because all registers are blocked by the calling convention.
- // Inputs operands must use a fixed register or use-at-start policy or
- // a non-register policy.
+ // Call instructions can use only fixed registers as
+ // temporaries and outputs because all registers
+ // are blocked by the calling convention.
+ // Inputs must use a fixed register.
ASSERT(Output() == NULL ||
LUnallocated::cast(Output())->HasFixedPolicy() ||
!LUnallocated::cast(Output())->HasRegisterPolicy());
for (UseIterator it(this); it.HasNext(); it.Advance()) {
- LUnallocated* operand = LUnallocated::cast(it.Next());
- ASSERT(operand->HasFixedPolicy() ||
- operand->IsUsedAtStart());
+ LOperand* operand = it.Next();
+ ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
+ !LUnallocated::cast(operand)->HasRegisterPolicy());
}
for (TempIterator it(this); it.HasNext(); it.Advance()) {
- LUnallocated* operand = LUnallocated::cast(it.Next());
- ASSERT(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
+ LOperand* operand = it.Next();
+ ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
+ !LUnallocated::cast(operand)->HasRegisterPolicy());
}
}
#endif
@@ -302,15 +303,6 @@
}
-void LInvokeFunction::PrintDataTo(StringStream* stream) {
- stream->Add("= ");
- InputAt(0)->PrintTo(stream);
- stream->Add(" ");
- InputAt(1)->PrintTo(stream);
- stream->Add(" #%d / ", arity());
-}
-
-
void LCallKeyed::PrintDataTo(StringStream* stream) {
stream->Add("[ecx] #%d / ", arity());
}
@@ -1230,15 +1222,6 @@
}
-LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
- LOperand* context = UseFixed(instr->context(), esi);
- LOperand* function = UseFixed(instr->function(), edi);
- argument_count_ -= instr->argument_count();
- LInvokeFunction* result = new LInvokeFunction(context, function);
- return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
-}
-
-
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
BuiltinFunctionId op = instr->op();
if (op == kMathLog) {
@@ -2019,13 +2002,6 @@
}
-LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
- LOperand* left = UseOrConstantAtStart(instr->left());
- LOperand* right = UseOrConstantAtStart(instr->right());
- return MarkAsCall(DefineFixed(new LStringAdd(left, right), eax), instr);
-}
-
-
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
LOperand* string = UseRegister(instr->string());
LOperand* index = UseRegisterOrConstant(instr->index());
@@ -2070,8 +2046,7 @@
LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LDeleteProperty* result =
- new LDeleteProperty(UseAtStart(instr->object()),
- UseOrConstantAtStart(instr->key()));
+ new LDeleteProperty(Use(instr->object()), UseOrConstant(instr->key()));
return MarkAsCall(DefineFixed(result, eax), instr);
}
diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h
index 76c90be..9ace8f8 100644
--- a/src/ia32/lithium-ia32.h
+++ b/src/ia32/lithium-ia32.h
@@ -39,7 +39,6 @@
// Forward declarations.
class LCodeGen;
-
#define LITHIUM_ALL_INSTRUCTION_LIST(V) \
V(ControlInstruction) \
V(Call) \
@@ -107,7 +106,6 @@
V(InstanceOfAndBranch) \
V(InstanceOfKnownGlobal) \
V(Integer32ToDouble) \
- V(InvokeFunction) \
V(IsNull) \
V(IsNullAndBranch) \
V(IsObject) \
@@ -156,7 +154,6 @@
V(StoreKeyedSpecializedArrayElement) \
V(StoreNamedField) \
V(StoreNamedGeneric) \
- V(StringAdd) \
V(StringCharCodeAt) \
V(StringCharFromCode) \
V(StringLength) \
@@ -1453,25 +1450,6 @@
};
-class LInvokeFunction: public LTemplateInstruction<1, 2, 0> {
- public:
- LInvokeFunction(LOperand* context, LOperand* function) {
- inputs_[0] = context;
- inputs_[1] = function;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke-function")
- DECLARE_HYDROGEN_ACCESSOR(InvokeFunction)
-
- LOperand* context() { return inputs_[0]; }
- LOperand* function() { return inputs_[1]; }
-
- virtual void PrintDataTo(StringStream* stream);
-
- int arity() const { return hydrogen()->argument_count() - 1; }
-};
-
-
class LCallKeyed: public LTemplateInstruction<1, 2, 0> {
public:
LCallKeyed(LOperand* context, LOperand* key) {
@@ -1791,21 +1769,6 @@
};
-class LStringAdd: public LTemplateInstruction<1, 2, 0> {
- public:
- LStringAdd(LOperand* left, LOperand* right) {
- inputs_[0] = left;
- inputs_[1] = right;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
- DECLARE_HYDROGEN_ACCESSOR(StringAdd)
-
- LOperand* left() { return inputs_[0]; }
- LOperand* right() { return inputs_[1]; }
-};
-
-
class LStringCharCodeAt: public LTemplateInstruction<1, 2, 0> {
public:
LStringCharCodeAt(LOperand* string, LOperand* index) {
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index ad567bc..13394cb 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -164,7 +164,7 @@
void MacroAssembler::Set(Register dst, const Immediate& x) {
if (x.is_zero()) {
- xor_(dst, Operand(dst)); // shorter than mov
+ xor_(dst, Operand(dst)); // Shorter than mov.
} else {
mov(dst, x);
}
@@ -176,6 +176,33 @@
}
+bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
+ static const int kMaxImmediateBits = 17;
+ if (x.rmode_ != RelocInfo::NONE) return false;
+ return !is_intn(x.x_, kMaxImmediateBits);
+}
+
+
+void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
+ if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
+ Set(dst, Immediate(x.x_ ^ jit_cookie()));
+ xor_(dst, jit_cookie());
+ } else {
+ Set(dst, x);
+ }
+}
+
+
+void MacroAssembler::SafePush(const Immediate& x) {
+ if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
+ push(Immediate(x.x_ ^ jit_cookie()));
+ xor_(Operand(esp, 0), Immediate(jit_cookie()));
+ } else {
+ push(x);
+ }
+}
+
+
void MacroAssembler::CmpObjectType(Register heap_object,
InstanceType type,
Register map) {
diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h
index 6909272..b986264 100644
--- a/src/ia32/macro-assembler-ia32.h
+++ b/src/ia32/macro-assembler-ia32.h
@@ -197,6 +197,11 @@
void Set(Register dst, const Immediate& x);
void Set(const Operand& dst, const Immediate& x);
+ // Support for constant splitting.
+ bool IsUnsafeImmediate(const Immediate& x);
+ void SafeSet(Register dst, const Immediate& x);
+ void SafePush(const Immediate& x);
+
// Compare object type for heap object.
// Incoming register is heap_object and outgoing register is map.
void CmpObjectType(Register heap_object, InstanceType type, Register map);
diff --git a/src/ia32/regexp-macro-assembler-ia32.cc b/src/ia32/regexp-macro-assembler-ia32.cc
index 067f8c8..5b2f208 100644
--- a/src/ia32/regexp-macro-assembler-ia32.cc
+++ b/src/ia32/regexp-macro-assembler-ia32.cc
@@ -662,7 +662,7 @@
}
-Handle<Object> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
+Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
// Finalize code - write the entry point code now we know how many
// registers we need.
@@ -879,7 +879,7 @@
Code::ComputeFlags(Code::REGEXP),
masm_->CodeObject());
PROFILE(masm_->isolate(), RegExpCodeCreateEvent(*code, *source));
- return Handle<Object>::cast(code);
+ return Handle<HeapObject>::cast(code);
}
diff --git a/src/ia32/regexp-macro-assembler-ia32.h b/src/ia32/regexp-macro-assembler-ia32.h
index 0af61f2..70606da 100644
--- a/src/ia32/regexp-macro-assembler-ia32.h
+++ b/src/ia32/regexp-macro-assembler-ia32.h
@@ -80,7 +80,7 @@
virtual void CheckPosition(int cp_offset, Label* on_outside_input);
virtual bool CheckSpecialCharacterClass(uc16 type, Label* on_no_match);
virtual void Fail();
- virtual Handle<Object> GetCode(Handle<String> source);
+ virtual Handle<HeapObject> GetCode(Handle<String> source);
virtual void GoTo(Label* label);
virtual void IfRegisterGE(int reg, int comparand, Label* if_ge);
virtual void IfRegisterLT(int reg, int comparand, Label* if_lt);
diff --git a/src/ic.cc b/src/ic.cc
index 2299922..99eb21f 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -304,23 +304,54 @@
}
+void KeyedLoadIC::ClearInlinedVersion(Address address) {
+ // Insert null as the map to check for to make sure the map check fails
+ // sending control flow to the IC instead of the inlined version.
+ PatchInlinedLoad(address, HEAP->null_value());
+}
+
+
void KeyedLoadIC::Clear(Address address, Code* target) {
if (target->ic_state() == UNINITIALIZED) return;
// Make sure to also clear the map used in inline fast cases. If we
// do not clear these maps, cached code can keep objects alive
// through the embedded maps.
+ ClearInlinedVersion(address);
SetTargetAtAddress(address, initialize_stub());
}
+void LoadIC::ClearInlinedVersion(Address address) {
+ // Reset the map check of the inlined inobject property load (if
+ // present) to guarantee failure by holding an invalid map (the null
+ // value). The offset can be patched to anything.
+ Heap* heap = HEAP;
+ PatchInlinedLoad(address, heap->null_value(), 0);
+ PatchInlinedContextualLoad(address,
+ heap->null_value(),
+ heap->null_value(),
+ true);
+}
+
+
void LoadIC::Clear(Address address, Code* target) {
if (target->ic_state() == UNINITIALIZED) return;
+ ClearInlinedVersion(address);
SetTargetAtAddress(address, initialize_stub());
}
+void StoreIC::ClearInlinedVersion(Address address) {
+ // Reset the map check of the inlined inobject property store (if
+ // present) to guarantee failure by holding an invalid map (the null
+ // value). The offset can be patched to anything.
+ PatchInlinedStore(address, HEAP->null_value(), 0);
+}
+
+
void StoreIC::Clear(Address address, Code* target) {
if (target->ic_state() == UNINITIALIZED) return;
+ ClearInlinedVersion(address);
SetTargetAtAddress(address,
(target->extra_ic_state() == kStrictMode)
? initialize_stub_strict()
@@ -328,6 +359,21 @@
}
+void KeyedStoreIC::ClearInlinedVersion(Address address) {
+ // Insert null as the elements map to check for. This will make
+ // sure that the elements fast-case map check fails so that control
+ // flows to the IC instead of the inlined version.
+ PatchInlinedStore(address, HEAP->null_value());
+}
+
+
+void KeyedStoreIC::RestoreInlinedVersion(Address address) {
+ // Restore the fast-case elements map check so that the inlined
+ // version can be used again.
+ PatchInlinedStore(address, HEAP->fixed_array_map());
+}
+
+
void KeyedStoreIC::Clear(Address address, Code* target) {
if (target->ic_state() == UNINITIALIZED) return;
SetTargetAtAddress(address,
@@ -827,6 +873,9 @@
#endif
if (state == PREMONOMORPHIC) {
if (object->IsString()) {
+ Map* map = HeapObject::cast(*object)->map();
+ const int offset = String::kLengthOffset;
+ PatchInlinedLoad(address(), map, offset);
set_target(isolate()->builtins()->builtin(
Builtins::kLoadIC_StringLength));
} else {
@@ -854,6 +903,9 @@
if (FLAG_trace_ic) PrintF("[LoadIC : +#length /array]\n");
#endif
if (state == PREMONOMORPHIC) {
+ Map* map = HeapObject::cast(*object)->map();
+ const int offset = JSArray::kLengthOffset;
+ PatchInlinedLoad(address(), map, offset);
set_target(isolate()->builtins()->builtin(
Builtins::kLoadIC_ArrayLength));
} else {
@@ -896,6 +948,63 @@
LOG(isolate(), SuspectReadEvent(*name, *object));
}
+ bool can_be_inlined_precheck =
+ FLAG_use_ic &&
+ lookup.IsProperty() &&
+ lookup.IsCacheable() &&
+ lookup.holder() == *object &&
+ !object->IsAccessCheckNeeded();
+
+ bool can_be_inlined =
+ can_be_inlined_precheck &&
+ state == PREMONOMORPHIC &&
+ lookup.type() == FIELD;
+
+ bool can_be_inlined_contextual =
+ can_be_inlined_precheck &&
+ state == UNINITIALIZED &&
+ lookup.holder()->IsGlobalObject() &&
+ lookup.type() == NORMAL;
+
+ if (can_be_inlined) {
+ Map* map = lookup.holder()->map();
+ // Property's index in the properties array. If negative we have
+ // an inobject property.
+ int index = lookup.GetFieldIndex() - map->inobject_properties();
+ if (index < 0) {
+ // Index is an offset from the end of the object.
+ int offset = map->instance_size() + (index * kPointerSize);
+ if (PatchInlinedLoad(address(), map, offset)) {
+ set_target(megamorphic_stub());
+ TRACE_IC_NAMED("[LoadIC : inline patch %s]\n", name);
+ return lookup.holder()->FastPropertyAt(lookup.GetFieldIndex());
+ } else {
+ TRACE_IC_NAMED("[LoadIC : no inline patch %s (patching failed)]\n",
+ name);
+ }
+ } else {
+ TRACE_IC_NAMED("[LoadIC : no inline patch %s (not inobject)]\n", name);
+ }
+ } else if (can_be_inlined_contextual) {
+ Map* map = lookup.holder()->map();
+ JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(
+ lookup.holder()->property_dictionary()->ValueAt(
+ lookup.GetDictionaryEntry()));
+ if (PatchInlinedContextualLoad(address(),
+ map,
+ cell,
+ lookup.IsDontDelete())) {
+ set_target(megamorphic_stub());
+ TRACE_IC_NAMED("[LoadIC : inline contextual patch %s]\n", name);
+ ASSERT(cell->value() != isolate()->heap()->the_hole_value());
+ return cell->value();
+ }
+ } else {
+ if (FLAG_use_ic && state == PREMONOMORPHIC) {
+ TRACE_IC_NAMED("[LoadIC : no inline patch %s (not inlinable)]\n", name);
+ }
+ }
+
// Update inline cache and stub cache.
if (FLAG_use_ic) {
UpdateCaches(&lookup, state, object, name);
@@ -1185,6 +1294,18 @@
#ifdef DEBUG
TraceIC("KeyedLoadIC", key, state, target());
#endif // DEBUG
+
+ // For JSObjects with fast elements that are not value wrappers
+ // and that do not have indexed interceptors, we initialize the
+ // inlined fast case (if present) by patching the inlined map
+ // check.
+ if (object->IsJSObject() &&
+ !object->IsJSValue() &&
+ !JSObject::cast(*object)->HasIndexedInterceptor() &&
+ JSObject::cast(*object)->HasFastElements()) {
+ Map* map = JSObject::cast(*object)->map();
+ PatchInlinedLoad(address(), map);
+ }
}
// Get the property.
@@ -1350,7 +1471,57 @@
LookupResult lookup;
if (LookupForWrite(*receiver, *name, &lookup)) {
- // Generate a stub for this store.
+ bool can_be_inlined =
+ state == UNINITIALIZED &&
+ lookup.IsProperty() &&
+ lookup.holder() == *receiver &&
+ lookup.type() == FIELD &&
+ !receiver->IsAccessCheckNeeded();
+
+ if (can_be_inlined) {
+ Map* map = lookup.holder()->map();
+ // Property's index in the properties array. If negative we have
+ // an inobject property.
+ int index = lookup.GetFieldIndex() - map->inobject_properties();
+ if (index < 0) {
+ // Index is an offset from the end of the object.
+ int offset = map->instance_size() + (index * kPointerSize);
+ if (PatchInlinedStore(address(), map, offset)) {
+ set_target((strict_mode == kStrictMode)
+ ? megamorphic_stub_strict()
+ : megamorphic_stub());
+#ifdef DEBUG
+ if (FLAG_trace_ic) {
+ PrintF("[StoreIC : inline patch %s]\n", *name->ToCString());
+ }
+#endif
+ return receiver->SetProperty(*name, *value, NONE, strict_mode);
+#ifdef DEBUG
+
+ } else {
+ if (FLAG_trace_ic) {
+ PrintF("[StoreIC : no inline patch %s (patching failed)]\n",
+ *name->ToCString());
+ }
+ }
+ } else {
+ if (FLAG_trace_ic) {
+ PrintF("[StoreIC : no inline patch %s (not inobject)]\n",
+ *name->ToCString());
+ }
+ }
+ } else {
+ if (state == PREMONOMORPHIC) {
+ if (FLAG_trace_ic) {
+ PrintF("[StoreIC : no inline patch %s (not inlinable)]\n",
+ *name->ToCString());
+#endif
+ }
+ }
+ }
+
+ // If no inlined store ic was patched, generate a stub for this
+ // store.
UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
} else {
// Strict mode doesn't allow setting non-existent global property
@@ -1819,7 +1990,6 @@
case INT32: return "Int32s";
case HEAP_NUMBER: return "HeapNumbers";
case ODDBALL: return "Oddball";
- case BOTH_STRING: return "BothStrings";
case STRING: return "Strings";
case GENERIC: return "Generic";
default: return "Invalid";
@@ -1835,7 +2005,6 @@
case INT32:
case HEAP_NUMBER:
case ODDBALL:
- case BOTH_STRING:
case STRING:
return MONOMORPHIC;
case GENERIC:
@@ -1850,17 +2019,12 @@
TRBinaryOpIC::TypeInfo y) {
if (x == UNINITIALIZED) return y;
if (y == UNINITIALIZED) return x;
- if (x == y) return x;
- if (x == BOTH_STRING && y == STRING) return STRING;
- if (x == STRING && y == BOTH_STRING) return STRING;
- if (x == STRING || x == BOTH_STRING || y == STRING || y == BOTH_STRING) {
- return GENERIC;
- }
- if (x > y) return x;
+ if (x == STRING && y == STRING) return STRING;
+ if (x == STRING || y == STRING) return GENERIC;
+ if (x >= y) return x;
return y;
}
-
TRBinaryOpIC::TypeInfo TRBinaryOpIC::GetTypeInfo(Handle<Object> left,
Handle<Object> right) {
::v8::internal::TypeInfo left_type =
@@ -1882,11 +2046,9 @@
return HEAP_NUMBER;
}
- // Patching for fast string ADD makes sense even if only one of the
- // arguments is a string.
- if (left_type.IsString()) {
- return right_type.IsString() ? BOTH_STRING : STRING;
- } else if (right_type.IsString()) {
+ if (left_type.IsString() || right_type.IsString()) {
+ // Patching for fast string ADD makes sense even if only one of the
+ // arguments is a string.
return STRING;
}
@@ -1919,11 +2081,11 @@
TRBinaryOpIC::TypeInfo type = TRBinaryOpIC::GetTypeInfo(left, right);
type = TRBinaryOpIC::JoinTypes(type, previous_type);
TRBinaryOpIC::TypeInfo result_type = TRBinaryOpIC::UNINITIALIZED;
- if ((type == TRBinaryOpIC::STRING || type == TRBinaryOpIC::BOTH_STRING) &&
- op != Token::ADD) {
+ if (type == TRBinaryOpIC::STRING && op != Token::ADD) {
type = TRBinaryOpIC::GENERIC;
}
- if (type == TRBinaryOpIC::SMI && previous_type == TRBinaryOpIC::SMI) {
+ if (type == TRBinaryOpIC::SMI &&
+ previous_type == TRBinaryOpIC::SMI) {
if (op == Token::DIV || op == Token::MUL || kSmiValueSize == 32) {
// Arithmetic on two Smi inputs has yielded a heap number.
// That is the only way to get here from the Smi stub.
@@ -1935,7 +2097,8 @@
result_type = TRBinaryOpIC::INT32;
}
}
- if (type == TRBinaryOpIC::INT32 && previous_type == TRBinaryOpIC::INT32) {
+ if (type == TRBinaryOpIC::INT32 &&
+ previous_type == TRBinaryOpIC::INT32) {
// We must be here because an operation on two INT32 types overflowed.
result_type = TRBinaryOpIC::HEAP_NUMBER;
}
diff --git a/src/ic.h b/src/ic.h
index 7b7ab43..911cbd8 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -296,6 +296,14 @@
bool support_wrappers);
static void GenerateFunctionPrototype(MacroAssembler* masm);
+ // Clear the use of the inlined version.
+ static void ClearInlinedVersion(Address address);
+
+ // The offset from the inlined patch site to the start of the
+ // inlined load instruction. It is architecture-dependent, and not
+ // used on ARM.
+ static const int kOffsetToLoadInstruction;
+
private:
// Update the inline cache and the global stub cache based on the
// lookup result.
@@ -320,6 +328,13 @@
static void Clear(Address address, Code* target);
+ static bool PatchInlinedLoad(Address address, Object* map, int index);
+
+ static bool PatchInlinedContextualLoad(Address address,
+ Object* map,
+ Object* cell,
+ bool is_dont_delete);
+
friend class IC;
};
@@ -346,6 +361,9 @@
static void GenerateIndexedInterceptor(MacroAssembler* masm);
+ // Clear the use of the inlined version.
+ static void ClearInlinedVersion(Address address);
+
// Bit mask to be tested against bit field for the cases when
// generic stub should go into slow case.
// Access check is necessary explicitly since generic stub does not perform
@@ -389,6 +407,10 @@
static void Clear(Address address, Code* target);
+ // Support for patching the map that is checked in an inlined
+ // version of keyed load.
+ static bool PatchInlinedLoad(Address address, Object* map);
+
friend class IC;
};
@@ -415,6 +437,13 @@
static void GenerateGlobalProxy(MacroAssembler* masm,
StrictModeFlag strict_mode);
+ // Clear the use of an inlined version.
+ static void ClearInlinedVersion(Address address);
+
+ // The offset from the inlined patch site to the start of the
+ // inlined store instruction.
+ static const int kOffsetToStoreInstruction;
+
private:
// Update the inline cache and the global stub cache based on the
// lookup result.
@@ -460,6 +489,10 @@
static void Clear(Address address, Code* target);
+ // Support for patching the index and the map that is checked in an
+ // inlined version of the named store.
+ static bool PatchInlinedStore(Address address, Object* map, int index);
+
friend class IC;
};
@@ -481,6 +514,12 @@
StrictModeFlag strict_mode);
static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode);
+ // Clear the inlined version so the IC is always hit.
+ static void ClearInlinedVersion(Address address);
+
+ // Restore the inlined version so the fast case can get hit.
+ static void RestoreInlinedVersion(Address address);
+
private:
// Update the inline cache.
void UpdateCaches(LookupResult* lookup,
@@ -525,6 +564,14 @@
static void Clear(Address address, Code* target);
+ // Support for patching the map that is checked in an inlined
+ // version of keyed store.
+ // The address is the patch point for the IC call
+ // (Assembler::kCallTargetAddressOffset before the end of
+ // the call/return address).
+ // The map is the new map that the inlined code should check against.
+ static bool PatchInlinedStore(Address address, Object* map);
+
friend class IC;
};
@@ -539,7 +586,6 @@
INT32,
HEAP_NUMBER,
ODDBALL,
- BOTH_STRING, // Only used for addition operation.
STRING, // Only used for addition operation. At least one string operand.
GENERIC
};
diff --git a/src/jsregexp.cc b/src/jsregexp.cc
index 06aae35..66b6332 100644
--- a/src/jsregexp.cc
+++ b/src/jsregexp.cc
@@ -858,12 +858,25 @@
RegExpNode* start,
int capture_count,
Handle<String> pattern) {
+ Heap* heap = pattern->GetHeap();
+
+ bool use_slow_safe_regexp_compiler = false;
+ if (heap->total_regexp_code_generated() >
+ RegExpImpl::kRegExpCompiledLimit &&
+ heap->isolate()->memory_allocator()->SizeExecutable() >
+ RegExpImpl::kRegExpExecutableMemoryLimit) {
+ use_slow_safe_regexp_compiler = true;
+ }
+
+ macro_assembler->set_slow_safe(use_slow_safe_regexp_compiler);
+
#ifdef DEBUG
if (FLAG_trace_regexp_assembler)
macro_assembler_ = new RegExpMacroAssemblerTracer(macro_assembler);
else
#endif
macro_assembler_ = macro_assembler;
+
List <RegExpNode*> work_list(0);
work_list_ = &work_list;
Label fail;
@@ -877,7 +890,8 @@
}
if (reg_exp_too_big_) return IrregexpRegExpTooBig();
- Handle<Object> code = macro_assembler_->GetCode(pattern);
+ Handle<HeapObject> code = macro_assembler_->GetCode(pattern);
+ heap->IncreaseTotalRegexpCodeGenerated(code->Size());
work_list_ = NULL;
#ifdef DEBUG
if (FLAG_print_code) {
diff --git a/src/jsregexp.h b/src/jsregexp.h
index b9b2f60..d56b650 100644
--- a/src/jsregexp.h
+++ b/src/jsregexp.h
@@ -175,6 +175,14 @@
static ByteArray* IrregexpByteCode(FixedArray* re, bool is_ascii);
static Code* IrregexpNativeCode(FixedArray* re, bool is_ascii);
+ // Limit the space regexps take up on the heap. In order to limit this we
+ // would like to keep track of the amount of regexp code on the heap. This
+ // is not tracked, however. As a conservative approximation we track the
+ // total regexp code compiled including code that has subsequently been freed
+ // and the total executable memory at any point.
+ static const int kRegExpExecutableMemoryLimit = 16 * MB;
+ static const int kRegExpCompiledLimit = 1 * MB;
+
private:
static String* last_ascii_string_;
static String* two_byte_cached_string_;
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index bd36459..68a5062 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -1077,6 +1077,12 @@
void MarkCompactCollector::MarkMapContents(Map* map) {
+ // Mark prototype transitions array but don't push it into marking stack.
+ // This will make references from it weak. We will clean dead prototype
+ // transitions in ClearNonLiveTransitions.
+ FixedArray* prototype_transitions = map->unchecked_prototype_transitions();
+ if (!prototype_transitions->IsMarked()) SetMark(prototype_transitions);
+
MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(
*HeapObject::RawField(map, Map::kInstanceDescriptorsOffset)));
@@ -1494,7 +1500,7 @@
void MarkCompactCollector::ClearNonLiveTransitions() {
- HeapObjectIterator map_iterator(heap() ->map_space(), &SizeOfMarkedObject);
+ HeapObjectIterator map_iterator(heap()->map_space(), &SizeOfMarkedObject);
// Iterate over the map space, setting map transitions that go from
// a marked map to an unmarked map to null transitions. At the same time,
// set all the prototype fields of maps back to their original value,
@@ -1522,6 +1528,41 @@
map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map);
}
+ // Clear dead prototype transitions.
+ FixedArray* prototype_transitions = map->unchecked_prototype_transitions();
+ if (prototype_transitions->length() > 0) {
+ int finger = Smi::cast(prototype_transitions->get(0))->value();
+ int new_finger = 1;
+ for (int i = 1; i < finger; i += 2) {
+ Object* prototype = prototype_transitions->get(i);
+ Object* cached_map = prototype_transitions->get(i + 1);
+ if (HeapObject::cast(prototype)->IsMarked() &&
+ HeapObject::cast(cached_map)->IsMarked()) {
+ if (new_finger != i) {
+ prototype_transitions->set_unchecked(heap_,
+ new_finger,
+ prototype,
+ UPDATE_WRITE_BARRIER);
+ prototype_transitions->set_unchecked(heap_,
+ new_finger + 1,
+ cached_map,
+ SKIP_WRITE_BARRIER);
+ }
+ new_finger += 2;
+ }
+ }
+
+ // Fill slots that became free with undefined value.
+ Object* undefined = heap()->raw_unchecked_undefined_value();
+ for (int i = new_finger; i < finger; i++) {
+ prototype_transitions->set_unchecked(heap_,
+ i,
+ undefined,
+ SKIP_WRITE_BARRIER);
+ }
+ prototype_transitions->set_unchecked(0, Smi::FromInt(new_finger));
+ }
+
// Follow the chain of back pointers to find the prototype.
Map* current = map;
while (SafeIsMap(current)) {
diff --git a/src/messages.js b/src/messages.js
index e657fc0..d8810dc 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -212,7 +212,9 @@
invalid_preparser_data: ["Invalid preparser data for function ", "%0"],
strict_mode_with: ["Strict mode code may not include a with statement"],
strict_catch_variable: ["Catch variable may not be eval or arguments in strict mode"],
- too_many_parameters: ["Too many parameters in function definition"],
+ too_many_arguments: ["Too many arguments in function call (only 32766 allowed)"],
+ too_many_parameters: ["Too many parameters in function definition (only 32766 allowed)"],
+ too_many_variables: ["Too many variables declared (only 32767 allowed)"],
strict_param_name: ["Parameter name eval or arguments is not allowed in strict mode"],
strict_param_dupe: ["Strict mode function may not have duplicate parameter names"],
strict_var_name: ["Variable name may not be eval or arguments in strict mode"],
diff --git a/src/mips/regexp-macro-assembler-mips.cc b/src/mips/regexp-macro-assembler-mips.cc
index d1dbc43..9f9e976 100644
--- a/src/mips/regexp-macro-assembler-mips.cc
+++ b/src/mips/regexp-macro-assembler-mips.cc
@@ -259,9 +259,9 @@
}
-Handle<Object> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
+Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
UNIMPLEMENTED_MIPS();
- return Handle<Object>::null();
+ return Handle<HeapObject>::null();
}
diff --git a/src/mips/regexp-macro-assembler-mips.h b/src/mips/regexp-macro-assembler-mips.h
index 2f4319f..7310c9d 100644
--- a/src/mips/regexp-macro-assembler-mips.h
+++ b/src/mips/regexp-macro-assembler-mips.h
@@ -81,7 +81,7 @@
virtual bool CheckSpecialCharacterClass(uc16 type,
Label* on_no_match);
virtual void Fail();
- virtual Handle<Object> GetCode(Handle<String> source);
+ virtual Handle<HeapObject> GetCode(Handle<String> source);
virtual void GoTo(Label* label);
virtual void IfRegisterGE(int reg, int comparand, Label* if_ge);
virtual void IfRegisterLT(int reg, int comparand, Label* if_lt);
diff --git a/src/natives.h b/src/natives.h
index 92f0d90..1df94b0 100644
--- a/src/natives.h
+++ b/src/natives.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -36,7 +36,7 @@
int index);
enum NativeType {
- CORE, EXPERIMENTAL, D8, I18N
+ CORE, D8, I18N
};
template <NativeType type>
@@ -57,7 +57,6 @@
};
typedef NativesCollection<CORE> Natives;
-typedef NativesCollection<EXPERIMENTAL> ExperimentalNatives;
} } // namespace v8::internal
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 823b2da..65aec5d 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -2515,6 +2515,12 @@
}
+FixedArray* Map::unchecked_prototype_transitions() {
+ return reinterpret_cast<FixedArray*>(
+ READ_FIELD(this, kPrototypeTransitionsOffset));
+}
+
+
Code::Flags Code::flags() {
return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}
@@ -2923,6 +2929,7 @@
ACCESSORS(Map, instance_descriptors, DescriptorArray,
kInstanceDescriptorsOffset)
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
+ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)
ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
diff --git a/src/objects.cc b/src/objects.cc
index 6ce4c44..a20548c 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -3713,39 +3713,68 @@
void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
+ // Traverse the transition tree without using a stack. We do this by
+ // reversing the pointers in the maps and descriptor arrays.
Map* current = this;
Map* meta_map = heap()->meta_map();
+ Object** map_or_index_field = NULL;
while (current != meta_map) {
DescriptorArray* d = reinterpret_cast<DescriptorArray*>(
*RawField(current, Map::kInstanceDescriptorsOffset));
- if (d == heap()->empty_descriptor_array()) {
- Map* prev = current->map();
- current->set_map(meta_map);
- callback(current, data);
- current = prev;
- continue;
+ if (!d->IsEmpty()) {
+ FixedArray* contents = reinterpret_cast<FixedArray*>(
+ d->get(DescriptorArray::kContentArrayIndex));
+ map_or_index_field = RawField(contents, HeapObject::kMapOffset);
+ Object* map_or_index = *map_or_index_field;
+ bool map_done = true; // Controls a nested continue statement.
+ for (int i = map_or_index->IsSmi() ? Smi::cast(map_or_index)->value() : 0;
+ i < contents->length();
+ i += 2) {
+ PropertyDetails details(Smi::cast(contents->get(i + 1)));
+ if (details.IsTransition()) {
+ // Found a map in the transition array. We record our progress in
+ // the transition array by recording the current map in the map field
+ // of the next map and recording the index in the transition array in
+ // the map field of the array.
+ Map* next = Map::cast(contents->get(i));
+ next->set_map(current);
+ *map_or_index_field = Smi::FromInt(i + 2);
+ current = next;
+ map_done = false;
+ break;
+ }
+ }
+ if (!map_done) continue;
}
-
- FixedArray* contents = reinterpret_cast<FixedArray*>(
- d->get(DescriptorArray::kContentArrayIndex));
- Object** map_or_index_field = RawField(contents, HeapObject::kMapOffset);
- Object* map_or_index = *map_or_index_field;
- bool map_done = true;
- for (int i = map_or_index->IsSmi() ? Smi::cast(map_or_index)->value() : 0;
- i < contents->length();
- i += 2) {
- PropertyDetails details(Smi::cast(contents->get(i + 1)));
- if (details.IsTransition()) {
- Map* next = reinterpret_cast<Map*>(contents->get(i));
+ // That was the regular transitions, now for the prototype transitions.
+ FixedArray* prototype_transitions =
+ current->unchecked_prototype_transitions();
+ Object** proto_map_or_index_field =
+ RawField(prototype_transitions, HeapObject::kMapOffset);
+ Object* map_or_index = *proto_map_or_index_field;
+ const int start = 2;
+ int i = map_or_index->IsSmi() ? Smi::cast(map_or_index)->value() : start;
+ if (i < prototype_transitions->length()) {
+ // Found a map in the prototype transition array. Record progress in
+ // an analogous way to the regular transitions array above.
+ Object* perhaps_map = prototype_transitions->get(i);
+ if (perhaps_map->IsMap()) {
+ Map* next = Map::cast(perhaps_map);
next->set_map(current);
- *map_or_index_field = Smi::FromInt(i + 2);
+ *proto_map_or_index_field =
+ Smi::FromInt(i + 2);
current = next;
- map_done = false;
- break;
+ continue;
}
}
- if (!map_done) continue;
- *map_or_index_field = heap()->fixed_array_map();
+ *proto_map_or_index_field = heap()->fixed_array_map();
+ if (map_or_index_field != NULL) {
+ *map_or_index_field = heap()->fixed_array_map();
+ }
+
+ // The callback expects a map to have a real map as its map, so we save
+ // the map field, which is being used to track the traversal and put the
+ // correct map (the meta_map) in place while we do the callback.
Map* prev = current->map();
current->set_map(meta_map);
callback(current, data);
@@ -6874,6 +6903,49 @@
}
+Object* Map::GetPrototypeTransition(Object* prototype) {
+ FixedArray* cache = prototype_transitions();
+ int capacity = cache->length();
+ if (capacity == 0) return NULL;
+ int finger = Smi::cast(cache->get(0))->value();
+ for (int i = 1; i < finger; i += 2) {
+ if (cache->get(i) == prototype) return cache->get(i + 1);
+ }
+ return NULL;
+}
+
+
+MaybeObject* Map::PutPrototypeTransition(Object* prototype, Map* map) {
+ // Don't cache prototype transition if this map is shared.
+ if (is_shared() || !FLAG_cache_prototype_transitions) return this;
+
+ FixedArray* cache = prototype_transitions();
+
+ int capacity = cache->length();
+
+ int finger = (capacity == 0) ? 1 : Smi::cast(cache->get(0))->value();
+
+ if (finger >= capacity) {
+ if (capacity > kMaxCachedPrototypeTransitions) return this;
+
+ FixedArray* new_cache;
+ { MaybeObject* maybe_cache = heap()->AllocateFixedArray(finger * 2 + 1);
+ if (!maybe_cache->To<FixedArray>(&new_cache)) return maybe_cache;
+ }
+
+ for (int i = 1; i < capacity; i++) new_cache->set(i, cache->get(i));
+ cache = new_cache;
+ set_prototype_transitions(cache);
+ }
+
+ cache->set(finger, prototype);
+ cache->set(finger + 1, map);
+ cache->set(0, Smi::FromInt(finger + 2));
+
+ return cache;
+}
+
+
MaybeObject* JSObject::SetPrototype(Object* value,
bool skip_hidden_prototypes) {
Heap* heap = GetHeap();
@@ -6924,11 +6996,25 @@
}
// Set the new prototype of the object.
- Object* new_map;
- { MaybeObject* maybe_new_map = real_receiver->map()->CopyDropTransitions();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
+ Map* map = real_receiver->map();
+
+ // Nothing to do if prototype is already set.
+ if (map->prototype() == value) return value;
+
+ Object* new_map = map->GetPrototypeTransition(value);
+ if (new_map == NULL) {
+ { MaybeObject* maybe_new_map = map->CopyDropTransitions();
+ if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
+ }
+
+ { MaybeObject* maybe_new_cache =
+ map->PutPrototypeTransition(value, Map::cast(new_map));
+ if (maybe_new_cache->IsFailure()) return maybe_new_cache;
+ }
+
+ Map::cast(new_map)->set_prototype(value);
}
- Map::cast(new_map)->set_prototype(value);
+ ASSERT(Map::cast(new_map)->prototype() == value);
real_receiver->set_map(Map::cast(new_map));
heap->ClearInstanceofCache();
diff --git a/src/objects.h b/src/objects.h
index 03445e8..e966b3d 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -613,6 +613,7 @@
class StringStream;
class ObjectVisitor;
+class Failure;
struct ValueInfo : public Malloced {
ValueInfo() : type(FIRST_TYPE), ptr(NULL), str(NULL), number(0) { }
@@ -639,6 +640,10 @@
*obj = reinterpret_cast<Object*>(this);
return true;
}
+ inline Failure* ToFailureUnchecked() {
+ ASSERT(IsFailure());
+ return reinterpret_cast<Failure*>(this);
+ }
inline Object* ToObjectUnchecked() {
ASSERT(!IsFailure());
return reinterpret_cast<Object*>(this);
@@ -648,6 +653,13 @@
return reinterpret_cast<Object*>(this);
}
+ template<typename T>
+ inline bool To(T** obj) {
+ if (IsFailure()) return false;
+ *obj = T::cast(reinterpret_cast<Object*>(this));
+ return true;
+ }
+
#ifdef OBJECT_PRINT
// Prints this object with details.
inline void Print() {
@@ -3712,6 +3724,16 @@
// [stub cache]: contains stubs compiled for this map.
DECL_ACCESSORS(code_cache, Object)
+ // [prototype transitions]: cache of prototype transitions.
+ // Prototype transition is a transition that happens
+ // when we change object's prototype to a new one.
+ // Cache format:
+ // 0: finger - index of the first free cell in the cache
+ // 1 + 2 * i: prototype
+ // 2 + 2 * i: target map
+ DECL_ACCESSORS(prototype_transitions, FixedArray)
+ inline FixedArray* unchecked_prototype_transitions();
+
// Lookup in the map's instance descriptors and fill out the result
// with the given holder if the name is found. The holder may be
// NULL when this function is used from the compiler.
@@ -3811,6 +3833,12 @@
void TraverseTransitionTree(TraverseCallback callback, void* data);
+ static const int kMaxCachedPrototypeTransitions = 256;
+
+ Object* GetPrototypeTransition(Object* prototype);
+
+ MaybeObject* PutPrototypeTransition(Object* prototype, Map* map);
+
static const int kMaxPreAllocatedPropertyFields = 255;
// Layout description.
@@ -3821,14 +3849,16 @@
static const int kInstanceDescriptorsOffset =
kConstructorOffset + kPointerSize;
static const int kCodeCacheOffset = kInstanceDescriptorsOffset + kPointerSize;
- static const int kPadStart = kCodeCacheOffset + kPointerSize;
+ static const int kPrototypeTransitionsOffset =
+ kCodeCacheOffset + kPointerSize;
+ static const int kPadStart = kPrototypeTransitionsOffset + kPointerSize;
static const int kSize = MAP_POINTER_ALIGN(kPadStart);
// Layout of pointer fields. Heap iteration code relies on them
// being continiously allocated.
static const int kPointerFieldsBeginOffset = Map::kPrototypeOffset;
static const int kPointerFieldsEndOffset =
- Map::kCodeCacheOffset + kPointerSize;
+ Map::kPrototypeTransitionsOffset + kPointerSize;
// Byte offsets within kInstanceSizesOffset.
static const int kInstanceSizeOffset = kInstanceSizesOffset + 0;
diff --git a/src/parser.cc b/src/parser.cc
index cf84bfa..266f77d 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -1308,7 +1308,7 @@
var = top_scope_->LocalLookup(name);
if (var == NULL) {
// Declare the name.
- var = top_scope_->DeclareLocal(name, mode);
+ var = top_scope_->DeclareLocal(name, mode, Scope::VAR_OR_CONST);
} else {
// The name was declared before; check for conflicting
// re-declarations. If the previous declaration was a const or the
@@ -1580,6 +1580,12 @@
is_const /* always bound for CONST! */,
CHECK_OK);
nvars++;
+ if (top_scope_->num_var_or_const() > kMaxNumFunctionLocals) {
+ ReportMessageAt(scanner().location(), "too_many_variables",
+ Vector<const char*>::empty());
+ *ok = false;
+ return NULL;
+ }
// Parse initialization expression if present and/or needed. A
// declaration of the form:
@@ -3495,6 +3501,12 @@
while (!done) {
Expression* argument = ParseAssignmentExpression(true, CHECK_OK);
result->Add(argument);
+ if (result->length() > kMaxNumFunctionParameters) {
+ ReportMessageAt(scanner().location(), "too_many_arguments",
+ Vector<const char*>::empty());
+ *ok = false;
+ return NULL;
+ }
done = (peek() == Token::RPAREN);
if (!done) Expect(Token::COMMA, CHECK_OK);
}
@@ -3562,7 +3574,9 @@
reserved_loc = scanner().location();
}
- Variable* parameter = top_scope_->DeclareLocal(param_name, Variable::VAR);
+ Variable* parameter = top_scope_->DeclareLocal(param_name,
+ Variable::VAR,
+ Scope::PARAMETER);
top_scope_->AddParameter(parameter);
num_parameters++;
if (num_parameters > kMaxNumFunctionParameters) {
diff --git a/src/parser.h b/src/parser.h
index a63651a..64f1303 100644
--- a/src/parser.h
+++ b/src/parser.h
@@ -448,6 +448,7 @@
// construct a hashable id, so if more than 2^17 are allowed, this
// should be checked.
static const int kMaxNumFunctionParameters = 32766;
+ static const int kMaxNumFunctionLocals = 32767;
FunctionLiteral* ParseLazy(CompilationInfo* info,
UC16CharacterStream* source,
ZoneScope* zone_scope);
diff --git a/src/platform-linux.cc b/src/platform-linux.cc
index 1ecd8fc..c60658f 100644
--- a/src/platform-linux.cc
+++ b/src/platform-linux.cc
@@ -922,19 +922,28 @@
vm_tgid_(getpid()),
interval_(interval) {}
+ static void InstallSignalHandler() {
+ struct sigaction sa;
+ sa.sa_sigaction = ProfilerSignalHandler;
+ sigemptyset(&sa.sa_mask);
+ sa.sa_flags = SA_RESTART | SA_SIGINFO;
+ signal_handler_installed_ =
+ (sigaction(SIGPROF, &sa, &old_signal_handler_) == 0);
+ }
+
+ static void RestoreSignalHandler() {
+ if (signal_handler_installed_) {
+ sigaction(SIGPROF, &old_signal_handler_, 0);
+ signal_handler_installed_ = false;
+ }
+ }
+
static void AddActiveSampler(Sampler* sampler) {
ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
if (instance_ == NULL) {
- // Install a signal handler.
- struct sigaction sa;
- sa.sa_sigaction = ProfilerSignalHandler;
- sigemptyset(&sa.sa_mask);
- sa.sa_flags = SA_RESTART | SA_SIGINFO;
- signal_handler_installed_ =
- (sigaction(SIGPROF, &sa, &old_signal_handler_) == 0);
-
- // Start a thread that sends SIGPROF signal to VM threads.
+ // Start a thread that will send SIGPROF signal to VM threads,
+ // when CPU profiling will be enabled.
instance_ = new SignalSender(sampler->interval());
instance_->Start();
} else {
@@ -950,12 +959,7 @@
instance_->Join();
delete instance_;
instance_ = NULL;
-
- // Restore the old signal handler.
- if (signal_handler_installed_) {
- sigaction(SIGPROF, &old_signal_handler_, 0);
- signal_handler_installed_ = false;
- }
+ RestoreSignalHandler();
}
}
@@ -967,6 +971,10 @@
bool cpu_profiling_enabled =
(state == SamplerRegistry::HAS_CPU_PROFILING_SAMPLERS);
bool runtime_profiler_enabled = RuntimeProfiler::IsEnabled();
+ if (cpu_profiling_enabled && !signal_handler_installed_)
+ InstallSignalHandler();
+ else if (!cpu_profiling_enabled && signal_handler_installed_)
+ RestoreSignalHandler();
// When CPU profiling is enabled both JavaScript and C++ code is
// profiled. We must not suspend.
if (!cpu_profiling_enabled) {
diff --git a/src/proxy.js b/src/proxy.js
deleted file mode 100644
index 2516983..0000000
--- a/src/proxy.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-global.Proxy = new $Object();
diff --git a/src/regexp-macro-assembler-irregexp.cc b/src/regexp-macro-assembler-irregexp.cc
index d41a97c..322efa1 100644
--- a/src/regexp-macro-assembler-irregexp.cc
+++ b/src/regexp-macro-assembler-irregexp.cc
@@ -435,7 +435,8 @@
}
-Handle<Object> RegExpMacroAssemblerIrregexp::GetCode(Handle<String> source) {
+Handle<HeapObject> RegExpMacroAssemblerIrregexp::GetCode(
+ Handle<String> source) {
Bind(&backtrack_);
Emit(BC_POP_BT, 0);
Handle<ByteArray> array = FACTORY->NewByteArray(length());
diff --git a/src/regexp-macro-assembler-irregexp.h b/src/regexp-macro-assembler-irregexp.h
index 9deea86..75cf8bf 100644
--- a/src/regexp-macro-assembler-irregexp.h
+++ b/src/regexp-macro-assembler-irregexp.h
@@ -106,7 +106,7 @@
virtual void IfRegisterEqPos(int register_index, Label* if_eq);
virtual IrregexpImplementation Implementation();
- virtual Handle<Object> GetCode(Handle<String> source);
+ virtual Handle<HeapObject> GetCode(Handle<String> source);
private:
void Expand();
// Code and bitmap emission.
diff --git a/src/regexp-macro-assembler-tracer.cc b/src/regexp-macro-assembler-tracer.cc
index fa2c657..b32d71d 100644
--- a/src/regexp-macro-assembler-tracer.cc
+++ b/src/regexp-macro-assembler-tracer.cc
@@ -365,7 +365,7 @@
}
-Handle<Object> RegExpMacroAssemblerTracer::GetCode(Handle<String> source) {
+Handle<HeapObject> RegExpMacroAssemblerTracer::GetCode(Handle<String> source) {
PrintF(" GetCode(%s);\n", *(source->ToCString()));
return assembler_->GetCode(source);
}
diff --git a/src/regexp-macro-assembler-tracer.h b/src/regexp-macro-assembler-tracer.h
index 1fb6d54..8c6cf3a 100644
--- a/src/regexp-macro-assembler-tracer.h
+++ b/src/regexp-macro-assembler-tracer.h
@@ -71,7 +71,7 @@
virtual bool CheckSpecialCharacterClass(uc16 type,
Label* on_no_match);
virtual void Fail();
- virtual Handle<Object> GetCode(Handle<String> source);
+ virtual Handle<HeapObject> GetCode(Handle<String> source);
virtual void GoTo(Label* label);
virtual void IfRegisterGE(int reg, int comparand, Label* if_ge);
virtual void IfRegisterLT(int reg, int comparand, Label* if_lt);
diff --git a/src/regexp-macro-assembler.cc b/src/regexp-macro-assembler.cc
index ea41db6..5578243 100644
--- a/src/regexp-macro-assembler.cc
+++ b/src/regexp-macro-assembler.cc
@@ -35,7 +35,7 @@
namespace v8 {
namespace internal {
-RegExpMacroAssembler::RegExpMacroAssembler() {
+RegExpMacroAssembler::RegExpMacroAssembler() : slow_safe_compiler_(false) {
}
@@ -54,7 +54,8 @@
#ifndef V8_INTERPRETED_REGEXP // Avoid unused code, e.g., on ARM.
-NativeRegExpMacroAssembler::NativeRegExpMacroAssembler() {
+NativeRegExpMacroAssembler::NativeRegExpMacroAssembler()
+ : RegExpMacroAssembler() {
}
@@ -64,7 +65,7 @@
bool NativeRegExpMacroAssembler::CanReadUnaligned() {
#ifdef V8_TARGET_CAN_READ_UNALIGNED
- return true;
+ return !slow_safe();
#else
return false;
#endif
diff --git a/src/regexp-macro-assembler.h b/src/regexp-macro-assembler.h
index 1268e78..0314c70 100644
--- a/src/regexp-macro-assembler.h
+++ b/src/regexp-macro-assembler.h
@@ -130,7 +130,7 @@
return false;
}
virtual void Fail() = 0;
- virtual Handle<Object> GetCode(Handle<String> source) = 0;
+ virtual Handle<HeapObject> GetCode(Handle<String> source) = 0;
virtual void GoTo(Label* label) = 0;
// Check whether a register is >= a given constant and go to a label if it
// is. Backtracks instead if the label is NULL.
@@ -162,6 +162,13 @@
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset) = 0;
virtual void ClearRegisters(int reg_from, int reg_to) = 0;
virtual void WriteStackPointerToRegister(int reg) = 0;
+
+ // Controls the generation of large inlined constants in the code.
+ void set_slow_safe(bool ssc) { slow_safe_compiler_ = ssc; }
+ bool slow_safe() { return slow_safe_compiler_; }
+
+ private:
+ bool slow_safe_compiler_;
};
diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc
index 8d258ac..97f0341 100644
--- a/src/runtime-profiler.cc
+++ b/src/runtime-profiler.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -153,7 +153,6 @@
if (FLAG_trace_opt) {
PrintF("[marking (%s) ", eager ? "eagerly" : "lazily");
function->PrintName();
- PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
PrintF(" for recompilation");
if (delay > 0) {
PrintF(" (delayed %0.3f ms)", static_cast<double>(delay) / 1000);
diff --git a/src/runtime.cc b/src/runtime.cc
index 53c048e..7335da8 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -2713,7 +2713,7 @@
end = RegExpImpl::GetCapture(match_info_array, 1);
}
- int length = subject->length();
+ int length = subject_handle->length();
int new_length = length - (end - start);
if (new_length == 0) {
return isolate->heap()->empty_string();
@@ -6597,16 +6597,9 @@
int exponent = number->get_exponent();
int sign = number->get_sign();
- if (exponent < -1) {
- // Number in range ]-0.5..0.5[. These always round to +/-zero.
- if (sign) return isolate->heap()->minus_zero_value();
- return Smi::FromInt(0);
- }
-
- // We compare with kSmiValueSize - 2 because (2^30 - 0.1) has exponent 29 and
- // should be rounded to 2^30, which is not smi (for 31-bit smis, similar
- // agument holds for 32-bit smis).
- if (!sign && exponent < kSmiValueSize - 2) {
+ // We compare with kSmiValueSize - 3 because (2^30 - 0.1) has exponent 29 and
+ // should be rounded to 2^30, which is not smi.
+ if (!sign && exponent <= kSmiValueSize - 3) {
return Smi::FromInt(static_cast<int>(value + 0.5));
}
@@ -10485,7 +10478,7 @@
// Recursively copy the with contexts.
Handle<Context> previous(context_chain->previous());
Handle<JSObject> extension(JSObject::cast(context_chain->extension()));
- Handle<Context> context = CopyWithContextChain(previous, function_context);
+ Handle<Context> context = CopyWithContextChain(function_context, previous);
return context->GetIsolate()->factory()->NewWithContext(
context, extension, context_chain->IsCatchContext());
}
diff --git a/src/scopes.cc b/src/scopes.cc
index 7d9bce5..8df93c5 100644
--- a/src/scopes.cc
+++ b/src/scopes.cc
@@ -202,6 +202,7 @@
inner_scope_calls_eval_ = false;
outer_scope_is_eval_scope_ = false;
force_eager_compilation_ = false;
+ num_var_or_const_ = 0;
num_stack_slots_ = 0;
num_heap_slots_ = 0;
scope_info_ = scope_info;
@@ -364,12 +365,17 @@
}
-Variable* Scope::DeclareLocal(Handle<String> name, Variable::Mode mode) {
+Variable* Scope::DeclareLocal(Handle<String> name,
+ Variable::Mode mode,
+ LocalType type) {
// DYNAMIC variables are introduces during variable allocation,
// INTERNAL variables are allocated explicitly, and TEMPORARY
// variables are allocated via NewTemporary().
ASSERT(!resolved());
ASSERT(mode == Variable::VAR || mode == Variable::CONST);
+ if (type == VAR_OR_CONST) {
+ num_var_or_const_++;
+ }
return variables_.Declare(this, name, mode, true, Variable::NORMAL);
}
diff --git a/src/scopes.h b/src/scopes.h
index 18db0cd..a0e56a4 100644
--- a/src/scopes.h
+++ b/src/scopes.h
@@ -95,6 +95,11 @@
GLOBAL_SCOPE // the top-level scope for a program or a top-level eval
};
+ enum LocalType {
+ PARAMETER,
+ VAR_OR_CONST
+ };
+
Scope(Scope* outer_scope, Type type);
virtual ~Scope() { }
@@ -134,7 +139,9 @@
// Declare a local variable in this scope. If the variable has been
// declared before, the previously declared variable is returned.
- virtual Variable* DeclareLocal(Handle<String> name, Variable::Mode mode);
+ virtual Variable* DeclareLocal(Handle<String> name,
+ Variable::Mode mode,
+ LocalType type);
// Declare an implicit global variable in this scope which must be a
// global scope. The variable was introduced (possibly from an inner
@@ -282,6 +289,9 @@
// cases the context parameter is an empty handle.
void AllocateVariables(Handle<Context> context);
+ // Current number of var or const locals.
+ int num_var_or_const() { return num_var_or_const_; }
+
// Result of variable allocation.
int num_stack_slots() const { return num_stack_slots_; }
int num_heap_slots() const { return num_heap_slots_; }
@@ -373,6 +383,9 @@
bool outer_scope_is_eval_scope_;
bool force_eager_compilation_;
+ // Computed as variables are declared.
+ int num_var_or_const_;
+
// Computed via AllocateVariables; function scopes only.
int num_stack_slots_;
int num_heap_slots_;
diff --git a/src/top.cc b/src/top.cc
index a8dba71..842d269 100644
--- a/src/top.cc
+++ b/src/top.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -572,6 +572,7 @@
// Set the exception being re-thrown.
set_pending_exception(exception);
+ if (exception->IsFailure()) return exception->ToFailureUnchecked();
return Failure::Exception();
}
diff --git a/src/type-info.cc b/src/type-info.cc
index 1940601..4069c83 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -275,8 +275,6 @@
return TypeInfo::Integer32();
case TRBinaryOpIC::HEAP_NUMBER:
return TypeInfo::Double();
- case TRBinaryOpIC::BOTH_STRING:
- return TypeInfo::String();
case TRBinaryOpIC::STRING:
case TRBinaryOpIC::GENERIC:
return unknown;
diff --git a/src/v8natives.js b/src/v8natives.js
index 4fcf0ac..429cea5 100644
--- a/src/v8natives.js
+++ b/src/v8natives.js
@@ -147,17 +147,6 @@
}
-// execScript for IE compatibility.
-function GlobalExecScript(expr, lang) {
- // NOTE: We don't care about the character casing.
- if (!lang || /javascript/i.test(lang)) {
- var f = %CompileString(ToString(expr));
- %_CallFunction(%GlobalReceiver(global), f);
- }
- return null;
-}
-
-
// ----------------------------------------------------------------------------
@@ -177,8 +166,7 @@
"isFinite", GlobalIsFinite,
"parseInt", GlobalParseInt,
"parseFloat", GlobalParseFloat,
- "eval", GlobalEval,
- "execScript", GlobalExecScript
+ "eval", GlobalEval
));
}
diff --git a/src/version.cc b/src/version.cc
index 25939c2..71a07db 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -33,9 +33,9 @@
// NOTE these macros are used by the SCons build script so their names
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
-#define MINOR_VERSION 3
-#define BUILD_NUMBER 0
-#define PATCH_LEVEL 1
+#define MINOR_VERSION 2
+#define BUILD_NUMBER 10
+#define PATCH_LEVEL 16
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index c06bc0c..6e4f005 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -1328,7 +1328,7 @@
void Assembler::jmp(NearLabel* L) {
EnsureSpace ensure_space(this);
if (L->is_bound()) {
- const int short_size = sizeof(int8_t);
+ const int short_size = 2;
int offs = L->pos() - pc_offset();
ASSERT(offs <= 0);
ASSERT(is_int8(offs - short_size));
@@ -2540,24 +2540,6 @@
}
-void Assembler::movq(XMMRegister dst, XMMRegister src) {
- EnsureSpace ensure_space(this);
- if (dst.low_bits() == 4) {
- // Avoid unnecessary SIB byte.
- emit(0xf3);
- emit_optional_rex_32(dst, src);
- emit(0x0F);
- emit(0x7e);
- emit_sse_operand(dst, src);
- } else {
- emit(0x66);
- emit_optional_rex_32(src, dst);
- emit(0x0F);
- emit(0xD6);
- emit_sse_operand(src, dst);
- }
-}
-
void Assembler::movdqa(const Operand& dst, XMMRegister src) {
EnsureSpace ensure_space(this);
emit(0x66);
@@ -2621,42 +2603,6 @@
}
-void Assembler::movaps(XMMRegister dst, XMMRegister src) {
- EnsureSpace ensure_space(this);
- if (src.low_bits() == 4) {
- // Try to avoid an unnecessary SIB byte.
- emit_optional_rex_32(src, dst);
- emit(0x0F);
- emit(0x29);
- emit_sse_operand(src, dst);
- } else {
- emit_optional_rex_32(dst, src);
- emit(0x0F);
- emit(0x28);
- emit_sse_operand(dst, src);
- }
-}
-
-
-void Assembler::movapd(XMMRegister dst, XMMRegister src) {
- EnsureSpace ensure_space(this);
- if (src.low_bits() == 4) {
- // Try to avoid an unnecessary SIB byte.
- emit(0x66);
- emit_optional_rex_32(src, dst);
- emit(0x0F);
- emit(0x29);
- emit_sse_operand(src, dst);
- } else {
- emit(0x66);
- emit_optional_rex_32(dst, src);
- emit(0x0F);
- emit(0x28);
- emit_sse_operand(dst, src);
- }
-}
-
-
void Assembler::movss(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
emit(0xF3); // single
@@ -2887,15 +2833,6 @@
}
-void Assembler::xorps(XMMRegister dst, XMMRegister src) {
- EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst, src);
- emit(0x0F);
- emit(0x57);
- emit_sse_operand(dst, src);
-}
-
-
void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
emit(0xF2);
@@ -2926,21 +2863,6 @@
}
-void Assembler::roundsd(XMMRegister dst, XMMRegister src,
- Assembler::RoundingMode mode) {
- ASSERT(CpuFeatures::IsEnabled(SSE4_1));
- EnsureSpace ensure_space(this);
- emit(0x66);
- emit_optional_rex_32(dst, src);
- emit(0x0f);
- emit(0x3a);
- emit(0x0b);
- emit_sse_operand(dst, src);
- // Mask precision exeption.
- emit(static_cast<byte>(mode) | 0x8);
-}
-
-
void Assembler::movmskpd(Register dst, XMMRegister src) {
EnsureSpace ensure_space(this);
emit(0x66);
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 8a9938b..9453277 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -1291,24 +1291,15 @@
void movd(Register dst, XMMRegister src);
void movq(XMMRegister dst, Register src);
void movq(Register dst, XMMRegister src);
- void movq(XMMRegister dst, XMMRegister src);
void extractps(Register dst, XMMRegister src, byte imm8);
- // Don't use this unless it's important to keep the
- // top half of the destination register unchanged.
- // Used movaps when moving double values and movq for integer
- // values in xmm registers.
- void movsd(XMMRegister dst, XMMRegister src);
-
void movsd(const Operand& dst, XMMRegister src);
+ void movsd(XMMRegister dst, XMMRegister src);
void movsd(XMMRegister dst, const Operand& src);
void movdqa(const Operand& dst, XMMRegister src);
void movdqa(XMMRegister dst, const Operand& src);
- void movapd(XMMRegister dst, XMMRegister src);
- void movaps(XMMRegister dst, XMMRegister src);
-
void movss(XMMRegister dst, const Operand& src);
void movss(const Operand& dst, XMMRegister src);
@@ -1340,21 +1331,11 @@
void andpd(XMMRegister dst, XMMRegister src);
void orpd(XMMRegister dst, XMMRegister src);
void xorpd(XMMRegister dst, XMMRegister src);
- void xorps(XMMRegister dst, XMMRegister src);
void sqrtsd(XMMRegister dst, XMMRegister src);
void ucomisd(XMMRegister dst, XMMRegister src);
void ucomisd(XMMRegister dst, const Operand& src);
- enum RoundingMode {
- kRoundToNearest = 0x0,
- kRoundDown = 0x1,
- kRoundUp = 0x2,
- kRoundToZero = 0x3
- };
-
- void roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode);
-
void movmskpd(Register dst, XMMRegister src);
// The first argument is the reg field, the second argument is the r/m field.
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 76fcc88..c365385 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -266,7 +266,7 @@
__ j(not_equal, &true_result);
// HeapNumber => false iff +0, -0, or NaN.
// These three cases set the zero flag when compared to zero using ucomisd.
- __ xorps(xmm0, xmm0);
+ __ xorpd(xmm0, xmm0);
__ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
__ j(zero, &false_result);
// Fall through to |true_result|.
@@ -372,9 +372,6 @@
case TRBinaryOpIC::ODDBALL:
GenerateOddballStub(masm);
break;
- case TRBinaryOpIC::BOTH_STRING:
- GenerateBothStringStub(masm);
- break;
case TRBinaryOpIC::STRING:
GenerateStringStub(masm);
break;
@@ -633,7 +630,7 @@
// already loaded heap_number_map.
__ AllocateInNewSpace(HeapNumber::kSize,
rax,
- rcx,
+ rdx,
no_reg,
&allocation_failed,
TAG_OBJECT);
@@ -653,7 +650,7 @@
// We need tagged values in rdx and rax for the following code,
// not int32 in rax and rcx.
__ Integer32ToSmi(rax, rcx);
- __ Integer32ToSmi(rdx, rax);
+ __ Integer32ToSmi(rdx, rbx);
__ jmp(allocation_failure);
}
break;
@@ -774,36 +771,6 @@
}
-void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
- Label call_runtime;
- ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING);
- ASSERT(op_ == Token::ADD);
- // If both arguments are strings, call the string add stub.
- // Otherwise, do a transition.
-
- // Registers containing left and right operands respectively.
- Register left = rdx;
- Register right = rax;
-
- // Test if left operand is a string.
- __ JumpIfSmi(left, &call_runtime);
- __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx);
- __ j(above_equal, &call_runtime);
-
- // Test if right operand is a string.
- __ JumpIfSmi(right, &call_runtime);
- __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx);
- __ j(above_equal, &call_runtime);
-
- StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
- GenerateRegisterArgsPush(masm);
- __ TailCallStub(&string_add_stub);
-
- __ bind(&call_runtime);
- GenerateTypeTransition(masm);
-}
-
-
void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
Label call_runtime;
@@ -1602,7 +1569,7 @@
__ bind(&no_neg);
// Load xmm1 with 1.
- __ movaps(xmm1, xmm3);
+ __ movsd(xmm1, xmm3);
NearLabel while_true;
NearLabel no_multiply;
@@ -1620,8 +1587,8 @@
__ j(positive, &allocate_return);
// Special case if xmm1 has reached infinity.
__ divsd(xmm3, xmm1);
- __ movaps(xmm1, xmm3);
- __ xorps(xmm0, xmm0);
+ __ movsd(xmm1, xmm3);
+ __ xorpd(xmm0, xmm0);
__ ucomisd(xmm0, xmm1);
__ j(equal, &call_runtime);
@@ -1669,11 +1636,11 @@
// Calculates reciprocal of square root.
// sqrtsd returns -0 when input is -0. ECMA spec requires +0.
- __ xorps(xmm1, xmm1);
+ __ xorpd(xmm1, xmm1);
__ addsd(xmm1, xmm0);
__ sqrtsd(xmm1, xmm1);
__ divsd(xmm3, xmm1);
- __ movaps(xmm1, xmm3);
+ __ movsd(xmm1, xmm3);
__ jmp(&allocate_return);
// Test for 0.5.
@@ -1686,8 +1653,8 @@
__ j(not_equal, &call_runtime);
// Calculates square root.
// sqrtsd returns -0 when input is -0. ECMA spec requires +0.
- __ xorps(xmm1, xmm1);
- __ addsd(xmm1, xmm0); // Convert -0 to 0.
+ __ xorpd(xmm1, xmm1);
+ __ addsd(xmm1, xmm0);
__ sqrtsd(xmm1, xmm1);
__ bind(&allocate_return);
@@ -2363,10 +2330,9 @@
// Heap::GetNumberStringCache.
Label is_smi;
Label load_result_from_cache;
- Factory* factory = masm->isolate()->factory();
if (!object_is_smi) {
__ JumpIfSmi(object, &is_smi);
- __ CheckMap(object, factory->heap_number_map(), not_found, true);
+ __ CheckMap(object, FACTORY->heap_number_map(), not_found, true);
STATIC_ASSERT(8 == kDoubleSize);
__ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
@@ -2453,7 +2419,6 @@
ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
Label check_unequal_objects, done;
- Factory* factory = masm->isolate()->factory();
// Compare two smis if required.
if (include_smi_compare_) {
@@ -2501,6 +2466,7 @@
// Note: if cc_ != equal, never_nan_nan_ is not used.
// We cannot set rax to EQUAL until just before return because
// rax must be unchanged on jump to not_identical.
+
if (never_nan_nan_ && (cc_ == equal)) {
__ Set(rax, EQUAL);
__ ret(0);
@@ -2508,7 +2474,7 @@
NearLabel heap_number;
// If it's not a heap number, then return equal for (in)equality operator.
__ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
- factory->heap_number_map());
+ FACTORY->heap_number_map());
__ j(equal, &heap_number);
if (cc_ != equal) {
// Call runtime on identical JSObjects. Otherwise return equal.
@@ -2553,7 +2519,7 @@
// Check if the non-smi operand is a heap number.
__ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
- factory->heap_number_map());
+ FACTORY->heap_number_map());
// If heap number, handle it in the slow case.
__ j(equal, &slow);
// Return non-equal. ebx (the lower half of rbx) is not zero.
@@ -3077,9 +3043,14 @@
__ Load(rax, js_entry_sp);
__ testq(rax, rax);
__ j(not_zero, ¬_outermost_js);
+ __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
__ movq(rax, rbp);
__ Store(js_entry_sp, rax);
+ Label cont;
+ __ jmp(&cont);
__ bind(¬_outermost_js);
+ __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
+ __ bind(&cont);
#endif
// Call a faked try-block that does the invoke.
@@ -3121,27 +3092,21 @@
__ call(kScratchRegister);
// Unlink this frame from the handler chain.
- Operand handler_operand =
- masm->ExternalOperand(ExternalReference(Isolate::k_handler_address,
- isolate));
- __ pop(handler_operand);
- // Pop next_sp.
- __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
+ __ PopTryHandler();
+ __ bind(&exit);
#ifdef ENABLE_LOGGING_AND_PROFILING
- // If current RBP value is the same as js_entry_sp value, it means that
- // the current function is the outermost.
- __ movq(kScratchRegister, js_entry_sp);
- __ cmpq(rbp, Operand(kScratchRegister, 0));
+ // Check if the current stack frame is marked as the outermost JS frame.
+ __ pop(rbx);
+ __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
__ j(not_equal, ¬_outermost_js_2);
+ __ movq(kScratchRegister, js_entry_sp);
__ movq(Operand(kScratchRegister, 0), Immediate(0));
__ bind(¬_outermost_js_2);
#endif
// Restore the top frame descriptor from the stack.
- __ bind(&exit);
- {
- Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
+ { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
__ pop(c_entry_fp_operand);
}
@@ -3484,11 +3449,10 @@
MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
__ Abort("Unexpected fallthrough to CharCodeAt slow case");
- Factory* factory = masm->isolate()->factory();
// Index is not a smi.
__ bind(&index_not_smi_);
// If index is a heap number, try converting it to an integer.
- __ CheckMap(index_, factory->heap_number_map(), index_not_number_, true);
+ __ CheckMap(index_, FACTORY->heap_number_map(), index_not_number_, true);
call_helper.BeforeCall(masm);
__ push(object_);
__ push(index_);
diff --git a/src/x64/code-stubs-x64.h b/src/x64/code-stubs-x64.h
index 3b40280..f97d099 100644
--- a/src/x64/code-stubs-x64.h
+++ b/src/x64/code-stubs-x64.h
@@ -152,7 +152,6 @@
void GenerateHeapNumberStub(MacroAssembler* masm);
void GenerateOddballStub(MacroAssembler* masm);
void GenerateStringStub(MacroAssembler* masm);
- void GenerateBothStringStub(MacroAssembler* masm);
void GenerateGenericStub(MacroAssembler* masm);
void GenerateHeapResultAllocation(MacroAssembler* masm, Label* alloc_failure);
diff --git a/src/x64/disasm-x64.cc b/src/x64/disasm-x64.cc
index 82bc6ef..2b7b7b7 100644
--- a/src/x64/disasm-x64.cc
+++ b/src/x64/disasm-x64.cc
@@ -1021,26 +1021,12 @@
current += PrintRightOperand(current);
AppendToBuffer(", %s, %d", NameOfCPURegister(regop), (*current) & 3);
current += 1;
- } else if (third_byte == 0x0b) {
- get_modrm(*current, &mod, ®op, &rm);
- // roundsd xmm, xmm/m64, imm8
- AppendToBuffer("roundsd %s, ", NameOfCPURegister(regop));
- current += PrintRightOperand(current);
- AppendToBuffer(", %d", (*current) & 3);
- current += 1;
} else {
UnimplementedInstruction();
}
} else {
get_modrm(*current, &mod, ®op, &rm);
- if (opcode == 0x28) {
- AppendToBuffer("movapd %s, ", NameOfXMMRegister(regop));
- current += PrintRightXMMOperand(current);
- } else if (opcode == 0x29) {
- AppendToBuffer("movapd ");
- current += PrintRightXMMOperand(current);
- AppendToBuffer(", %s", NameOfXMMRegister(regop));
- } else if (opcode == 0x6E) {
+ if (opcode == 0x6E) {
AppendToBuffer("mov%c %s,",
rex_w() ? 'q' : 'd',
NameOfXMMRegister(regop));
@@ -1058,10 +1044,6 @@
AppendToBuffer("movdqa ");
current += PrintRightXMMOperand(current);
AppendToBuffer(", %s", NameOfXMMRegister(regop));
- } else if (opcode == 0xD6) {
- AppendToBuffer("movq ");
- current += PrintRightXMMOperand(current);
- AppendToBuffer(", %s", NameOfXMMRegister(regop));
} else {
const char* mnemonic = "?";
if (opcode == 0x50) {
@@ -1163,11 +1145,6 @@
get_modrm(*current, &mod, ®op, &rm);
AppendToBuffer("cvtss2sd %s,", NameOfXMMRegister(regop));
current += PrintRightXMMOperand(current);
- } else if (opcode == 0x7E) {
- int mod, regop, rm;
- get_modrm(*current, &mod, ®op, &rm);
- AppendToBuffer("movq %s, ", NameOfXMMRegister(regop));
- current += PrintRightXMMOperand(current);
} else {
UnimplementedInstruction();
}
@@ -1185,22 +1162,6 @@
current += 4;
} // else no immediate displacement.
AppendToBuffer("nop");
-
- } else if (opcode == 28) {
- // movaps xmm, xmm/m128
- int mod, regop, rm;
- get_modrm(*current, &mod, ®op, &rm);
- AppendToBuffer("movaps %s, ", NameOfXMMRegister(regop));
- current += PrintRightXMMOperand(current);
-
- } else if (opcode == 29) {
- // movaps xmm/m128, xmm
- int mod, regop, rm;
- get_modrm(*current, &mod, ®op, &rm);
- AppendToBuffer("movaps");
- current += PrintRightXMMOperand(current);
- AppendToBuffer(", %s", NameOfXMMRegister(regop));
-
} else if (opcode == 0xA2 || opcode == 0x31) {
// RDTSC or CPUID
AppendToBuffer("%s", mnemonic);
@@ -1212,13 +1173,6 @@
byte_size_operand_ = idesc.byte_size_operation;
current += PrintOperands(idesc.mnem, idesc.op_order_, current);
- } else if (opcode == 57) {
- // xoprps xmm, xmm/m128
- int mod, regop, rm;
- get_modrm(*current, &mod, ®op, &rm);
- AppendToBuffer("xorps %s, ", NameOfXMMRegister(regop));
- current += PrintRightXMMOperand(current);
-
} else if ((opcode & 0xF0) == 0x80) {
// Jcc: Conditional jump (branch).
current = data + JumpConditional(data);
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index d5fb7da..6933d78 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -1398,13 +1398,17 @@
// Fall through.
case ObjectLiteral::Property::COMPUTED:
if (key->handle()->IsSymbol()) {
- VisitForAccumulatorValue(value);
- __ Move(rcx, key->handle());
- __ movq(rdx, Operand(rsp, 0));
if (property->emit_store()) {
- Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
+ VisitForAccumulatorValue(value);
+ __ Move(rcx, key->handle());
+ __ movq(rdx, Operand(rsp, 0));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(key->id(), NO_REGISTERS);
+ } else {
+ VisitForEffect(value);
}
break;
}
@@ -2758,7 +2762,7 @@
__ movd(xmm1, rcx);
__ movd(xmm0, rax);
__ cvtss2sd(xmm1, xmm1);
- __ xorps(xmm0, xmm1);
+ __ xorpd(xmm0, xmm1);
__ subsd(xmm0, xmm1);
__ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
@@ -3043,14 +3047,15 @@
void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
ASSERT(args->length() >= 2);
- int arg_count = args->length() - 2; // 2 ~ receiver and function.
- for (int i = 0; i < arg_count + 1; i++) {
- VisitForStackValue(args->at(i));
+ int arg_count = args->length() - 2; // For receiver and function.
+ VisitForStackValue(args->at(0)); // Receiver.
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i + 1));
}
- VisitForAccumulatorValue(args->last()); // Function.
+ VisitForAccumulatorValue(args->at(arg_count + 1)); // Function.
- // InvokeFunction requires the function in rdi. Move it in there.
- __ movq(rdi, result_register());
+ // InvokeFunction requires function in rdi. Move it in there.
+ if (!result_register().is(rdi)) __ movq(rdi, result_register());
ParameterCount count(arg_count);
__ InvokeFunction(rdi, count, CALL_FUNCTION);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -4231,7 +4236,30 @@
default:
break;
}
+
__ call(ic, mode);
+
+ // Crankshaft doesn't need patching of inlined loads and stores.
+ // When compiling the snapshot we need to produce code that works
+ // with and without Crankshaft.
+ if (V8::UseCrankshaft() && !Serializer::enabled()) {
+ return;
+ }
+
+ // If we're calling a (keyed) load or store stub, we have to mark
+ // the call as containing no inlined code so we will not attempt to
+ // patch it.
+ switch (ic->kind()) {
+ case Code::LOAD_IC:
+ case Code::KEYED_LOAD_IC:
+ case Code::STORE_IC:
+ case Code::KEYED_STORE_IC:
+ __ nop(); // Signals no inlined code.
+ break;
+ default:
+ // Do nothing.
+ break;
+ }
}
@@ -4252,6 +4280,7 @@
default:
break;
}
+
__ call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index 5ed89b5..5ca56ac 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -381,6 +381,11 @@
}
+// The offset from the inlined patch site to the start of the inlined
+// load instruction.
+const int LoadIC::kOffsetToLoadInstruction = 20;
+
+
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : receiver
@@ -1292,6 +1297,130 @@
}
+bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
+ if (V8::UseCrankshaft()) return false;
+
+ // The address of the instruction following the call.
+ Address test_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+ // If the instruction following the call is not a test rax, nothing
+ // was inlined.
+ if (*test_instruction_address != Assembler::kTestEaxByte) return false;
+
+ Address delta_address = test_instruction_address + 1;
+ // The delta to the start of the map check instruction.
+ int delta = *reinterpret_cast<int*>(delta_address);
+
+ // The map address is the last 8 bytes of the 10-byte
+ // immediate move instruction, so we add 2 to get the
+ // offset to the last 8 bytes.
+ Address map_address = test_instruction_address + delta + 2;
+ *(reinterpret_cast<Object**>(map_address)) = map;
+
+ // The offset is in the 32-bit displacement of a seven byte
+ // memory-to-register move instruction (REX.W 0x88 ModR/M disp32),
+ // so we add 3 to get the offset of the displacement.
+ Address offset_address =
+ test_instruction_address + delta + kOffsetToLoadInstruction + 3;
+ *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
+ return true;
+}
+
+
+bool LoadIC::PatchInlinedContextualLoad(Address address,
+ Object* map,
+ Object* cell,
+ bool is_dont_delete) {
+ // TODO(<bug#>): implement this.
+ return false;
+}
+
+
+bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
+ if (V8::UseCrankshaft()) return false;
+
+ // The address of the instruction following the call.
+ Address test_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+
+ // If the instruction following the call is not a test rax, nothing
+ // was inlined.
+ if (*test_instruction_address != Assembler::kTestEaxByte) return false;
+
+ // Extract the encoded deltas from the test rax instruction.
+ Address encoded_offsets_address = test_instruction_address + 1;
+ int encoded_offsets = *reinterpret_cast<int*>(encoded_offsets_address);
+ int delta_to_map_check = -(encoded_offsets & 0xFFFF);
+ int delta_to_record_write = encoded_offsets >> 16;
+
+ // Patch the map to check. The map address is the last 8 bytes of
+ // the 10-byte immediate move instruction.
+ Address map_check_address = test_instruction_address + delta_to_map_check;
+ Address map_address = map_check_address + 2;
+ *(reinterpret_cast<Object**>(map_address)) = map;
+
+ // Patch the offset in the store instruction. The offset is in the
+ // last 4 bytes of a 7 byte register-to-memory move instruction.
+ Address offset_address =
+ map_check_address + StoreIC::kOffsetToStoreInstruction + 3;
+ // The offset should have initial value (kMaxInt - 1), cleared value
+ // (-1) or we should be clearing the inlined version.
+ ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt - 1 ||
+ *reinterpret_cast<int*>(offset_address) == -1 ||
+ (offset == 0 && map == HEAP->null_value()));
+ *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
+
+ // Patch the offset in the write-barrier code. The offset is the
+ // last 4 bytes of a 7 byte lea instruction.
+ offset_address = map_check_address + delta_to_record_write + 3;
+ // The offset should have initial value (kMaxInt), cleared value
+ // (-1) or we should be clearing the inlined version.
+ ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt ||
+ *reinterpret_cast<int*>(offset_address) == -1 ||
+ (offset == 0 && map == HEAP->null_value()));
+ *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
+
+ return true;
+}
+
+
+static bool PatchInlinedMapCheck(Address address, Object* map) {
+ if (V8::UseCrankshaft()) return false;
+
+ // Arguments are address of start of call sequence that called
+ // the IC,
+ Address test_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+ // The keyed load has a fast inlined case if the IC call instruction
+ // is immediately followed by a test instruction.
+ if (*test_instruction_address != Assembler::kTestEaxByte) return false;
+
+ // Fetch the offset from the test instruction to the map compare
+ // instructions (starting with the 64-bit immediate mov of the map
+ // address). This offset is stored in the last 4 bytes of the 5
+ // byte test instruction.
+ Address delta_address = test_instruction_address + 1;
+ int delta = *reinterpret_cast<int*>(delta_address);
+ // Compute the map address. The map address is in the last 8 bytes
+ // of the 10-byte immediate mov instruction (incl. REX prefix), so we add 2
+ // to the offset to get the map address.
+ Address map_address = test_instruction_address + delta + 2;
+ // Patch the map check.
+ *(reinterpret_cast<Object**>(map_address)) = map;
+ return true;
+}
+
+
+bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
+ return PatchInlinedMapCheck(address, map);
+}
+
+
+bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
+ return PatchInlinedMapCheck(address, map);
+}
+
+
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : key
@@ -1374,6 +1503,11 @@
}
+// The offset from the inlined patch site to the start of the inlined
+// store instruction.
+const int StoreIC::kOffsetToStoreInstruction = 20;
+
+
void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : value
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index c242874..202e7a2 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -91,7 +91,7 @@
void LCodeGen::FinishCode(Handle<Code> code) {
ASSERT(is_done());
- code->set_stack_slots(GetStackSlotCount());
+ code->set_stack_slots(StackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code);
Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
@@ -146,7 +146,7 @@
__ push(rdi); // Callee's JS function.
// Reserve space for the stack slots needed by the code.
- int slots = GetStackSlotCount();
+ int slots = StackSlotCount();
if (slots > 0) {
if (FLAG_debug_code) {
__ Set(rax, slots);
@@ -290,7 +290,7 @@
while (byte_count-- > 0) {
__ int3();
}
- safepoints_.Emit(masm(), GetStackSlotCount());
+ safepoints_.Emit(masm(), StackSlotCount());
return !is_aborted();
}
@@ -418,7 +418,7 @@
translation->StoreDoubleStackSlot(op->index());
} else if (op->IsArgument()) {
ASSERT(is_tagged);
- int src_index = GetStackSlotCount() + op->index();
+ int src_index = StackSlotCount() + op->index();
translation->StoreStackSlot(src_index);
} else if (op->IsRegister()) {
Register reg = ToRegister(op);
@@ -1111,7 +1111,7 @@
// Use xor to produce +0.0 in a fast and compact way, but avoid to
// do so if the constant is -0.0.
if (int_val == 0) {
- __ xorps(res, res);
+ __ xorpd(res, res);
} else {
Register tmp = ToRegister(instr->TempAt(0));
__ Set(tmp, int_val);
@@ -1223,12 +1223,12 @@
break;
case Token::MOD:
__ PrepareCallCFunction(2);
- __ movaps(xmm0, left);
+ __ movsd(xmm0, left);
ASSERT(right.is(xmm1));
__ CallCFunction(
ExternalReference::double_fp_operation(Token::MOD, isolate()), 2);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
- __ movaps(result, xmm0);
+ __ movsd(result, xmm0);
break;
default:
UNREACHABLE();
@@ -1287,7 +1287,7 @@
EmitBranch(true_block, false_block, not_zero);
} else if (r.IsDouble()) {
XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
- __ xorps(xmm0, xmm0);
+ __ xorpd(xmm0, xmm0);
__ ucomisd(reg, xmm0);
EmitBranch(true_block, false_block, not_equal);
} else {
@@ -1322,7 +1322,7 @@
// HeapNumber => false iff +0, -0, or NaN. These three cases set the
// zero flag when compared to zero using ucomisd.
- __ xorps(xmm0, xmm0);
+ __ xorpd(xmm0, xmm0);
__ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
__ j(zero, false_label);
__ jmp(true_label);
@@ -2058,7 +2058,7 @@
}
__ movq(rsp, rbp);
__ pop(rbp);
- __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
+ __ Ret((ParameterCount() + 1) * kPointerSize, rcx);
}
@@ -2507,19 +2507,25 @@
env->deoptimization_index());
v8::internal::ParameterCount actual(rax);
__ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
- __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
void LCodeGen::DoPushArgument(LPushArgument* instr) {
LOperand* argument = instr->InputAt(0);
- EmitPushTaggedOperand(argument);
+ if (argument->IsConstantOperand()) {
+ EmitPushConstantOperand(argument);
+ } else if (argument->IsRegister()) {
+ __ push(ToRegister(argument));
+ } else {
+ ASSERT(!argument->IsDoubleRegister());
+ __ push(ToOperand(argument));
+ }
}
void LCodeGen::DoContext(LContext* instr) {
Register result = ToRegister(instr->result());
- __ movq(result, rsi);
+ __ movq(result, Operand(rbp, StandardFrameConstants::kContextOffset));
}
@@ -2671,7 +2677,7 @@
if (r.IsDouble()) {
XMMRegister scratch = xmm0;
XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
- __ xorps(scratch, scratch);
+ __ xorpd(scratch, scratch);
__ subsd(scratch, input_reg);
__ andpd(input_reg, scratch);
} else if (r.IsInteger32()) {
@@ -2682,9 +2688,7 @@
Register input_reg = ToRegister(instr->InputAt(0));
// Smi check.
__ JumpIfNotSmi(input_reg, deferred->entry());
- __ SmiToInteger32(input_reg, input_reg);
EmitIntegerMathAbs(instr);
- __ Integer32ToSmi(input_reg, input_reg);
__ bind(deferred->exit());
}
}
@@ -2694,36 +2698,21 @@
XMMRegister xmm_scratch = xmm0;
Register output_reg = ToRegister(instr->result());
XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
+ __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
+ __ ucomisd(input_reg, xmm_scratch);
- if (CpuFeatures::IsSupported(SSE4_1)) {
- CpuFeatures::Scope scope(SSE4_1);
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- // Deoptimize if minus zero.
- __ movq(output_reg, input_reg);
- __ subq(output_reg, Immediate(1));
- DeoptimizeIf(overflow, instr->environment());
- }
- __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
- __ cvttsd2si(output_reg, xmm_scratch);
- __ cmpl(output_reg, Immediate(0x80000000));
- DeoptimizeIf(equal, instr->environment());
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ DeoptimizeIf(below_equal, instr->environment());
} else {
- __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
- __ ucomisd(input_reg, xmm_scratch);
-
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- DeoptimizeIf(below_equal, instr->environment());
- } else {
- DeoptimizeIf(below, instr->environment());
- }
-
- // Use truncating instruction (OK because input is positive).
- __ cvttsd2si(output_reg, input_reg);
-
- // Overflow is signalled with minint.
- __ cmpl(output_reg, Immediate(0x80000000));
- DeoptimizeIf(equal, instr->environment());
+ DeoptimizeIf(below, instr->environment());
}
+
+ // Use truncating instruction (OK because input is positive).
+ __ cvttsd2si(output_reg, input_reg);
+
+ // Overflow is signalled with minint.
+ __ cmpl(output_reg, Immediate(0x80000000));
+ DeoptimizeIf(equal, instr->environment());
}
@@ -2732,44 +2721,33 @@
Register output_reg = ToRegister(instr->result());
XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
- Label done;
// xmm_scratch = 0.5
__ movq(kScratchRegister, V8_INT64_C(0x3FE0000000000000), RelocInfo::NONE);
__ movq(xmm_scratch, kScratchRegister);
- NearLabel below_half;
- __ ucomisd(xmm_scratch, input_reg);
- __ j(above, &below_half); // If input_reg is NaN, this doesn't jump.
- // input = input + 0.5
- // This addition might give a result that isn't the correct for
- // rounding, due to loss of precision, but only for a number that's
- // so big that the conversion below will overflow anyway.
- __ addsd(input_reg, xmm_scratch);
- // Compute Math.floor(input).
- // Use truncating instruction (OK because input is positive).
- __ cvttsd2si(output_reg, input_reg);
- // Overflow is signalled with minint.
- __ cmpl(output_reg, Immediate(0x80000000));
- DeoptimizeIf(equal, instr->environment());
- __ jmp(&done);
- __ bind(&below_half);
+ // input = input + 0.5
+ __ addsd(input_reg, xmm_scratch);
+
+ // We need to return -0 for the input range [-0.5, 0[, otherwise
+ // compute Math.floor(value + 0.5).
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- // Bailout if negative (including -0).
- __ movq(output_reg, input_reg);
- __ testq(output_reg, output_reg);
- DeoptimizeIf(negative, instr->environment());
+ __ ucomisd(input_reg, xmm_scratch);
+ DeoptimizeIf(below_equal, instr->environment());
} else {
- // Bailout if below -0.5, otherwise round to (positive) zero, even
- // if negative.
- // xmm_scrach = -0.5
- __ movq(kScratchRegister, V8_INT64_C(0xBFE0000000000000), RelocInfo::NONE);
- __ movq(xmm_scratch, kScratchRegister);
+ // If we don't need to bailout on -0, we check only bailout
+ // on negative inputs.
+ __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
__ ucomisd(input_reg, xmm_scratch);
DeoptimizeIf(below, instr->environment());
}
- __ xorl(output_reg, output_reg);
- __ bind(&done);
+ // Compute Math.floor(value + 0.5).
+ // Use truncating instruction (OK because input is positive).
+ __ cvttsd2si(output_reg, input_reg);
+
+ // Overflow is signalled with minint.
+ __ cmpl(output_reg, Immediate(0x80000000));
+ DeoptimizeIf(equal, instr->environment());
}
@@ -2784,7 +2762,7 @@
XMMRegister xmm_scratch = xmm0;
XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
- __ xorps(xmm_scratch, xmm_scratch);
+ __ xorpd(xmm_scratch, xmm_scratch);
__ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
__ sqrtsd(input_reg, input_reg);
}
@@ -2800,7 +2778,7 @@
if (exponent_type.IsDouble()) {
__ PrepareCallCFunction(2);
// Move arguments to correct registers
- __ movaps(xmm0, left_reg);
+ __ movsd(xmm0, left_reg);
ASSERT(ToDoubleRegister(right).is(xmm1));
__ CallCFunction(
ExternalReference::power_double_double_function(isolate()), 2);
@@ -2808,7 +2786,7 @@
__ PrepareCallCFunction(2);
// Move arguments to correct registers: xmm0 and edi (not rdi).
// On Windows, the registers are xmm0 and edx.
- __ movaps(xmm0, left_reg);
+ __ movsd(xmm0, left_reg);
#ifdef _WIN64
ASSERT(ToRegister(right).is(rdx));
#else
@@ -2834,13 +2812,13 @@
__ bind(&call);
__ PrepareCallCFunction(2);
// Move arguments to correct registers xmm0 and xmm1.
- __ movaps(xmm0, left_reg);
+ __ movsd(xmm0, left_reg);
// Right argument is already in xmm1.
__ CallCFunction(
ExternalReference::power_double_double_function(isolate()), 2);
}
// Return value is in xmm0.
- __ movaps(result_reg, xmm0);
+ __ movsd(result_reg, xmm0);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
@@ -2903,21 +2881,6 @@
}
-void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
- ASSERT(ToRegister(instr->function()).is(rdi));
- ASSERT(instr->HasPointerMap());
- ASSERT(instr->HasDeoptimizationEnvironment());
- LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
- RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator generator(this, pointers, env->deoptimization_index());
- ParameterCount count(instr->arity());
- __ InvokeFunction(rdi, count, CALL_FUNCTION, &generator);
- __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
-}
-
-
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
ASSERT(ToRegister(instr->key()).is(rcx));
ASSERT(ToRegister(instr->result()).is(rax));
@@ -3124,14 +3087,6 @@
}
-void LCodeGen::DoStringAdd(LStringAdd* instr) {
- EmitPushTaggedOperand(instr->left());
- EmitPushTaggedOperand(instr->right());
- StringAddStub stub(NO_STRING_CHECK_IN_STUB);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-}
-
-
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
class DeferredStringCharCodeAt: public LDeferredCode {
public:
@@ -3422,7 +3377,7 @@
DeoptimizeIf(not_equal, env);
// Convert undefined to NaN. Compute NaN as 0/0.
- __ xorps(result_reg, result_reg);
+ __ xorpd(result_reg, result_reg);
__ divsd(result_reg, result_reg);
__ jmp(&done);
@@ -3791,7 +3746,14 @@
void LCodeGen::DoTypeof(LTypeof* instr) {
LOperand* input = instr->InputAt(0);
- EmitPushTaggedOperand(input);
+ if (input->IsConstantOperand()) {
+ __ Push(ToHandle(LConstantOperand::cast(input)));
+ } else if (input->IsRegister()) {
+ __ push(ToRegister(input));
+ } else {
+ ASSERT(input->IsStackSlot());
+ __ push(ToOperand(input));
+ }
CallRuntime(Runtime::kTypeof, 1, instr);
}
@@ -3819,14 +3781,19 @@
}
-void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
- ASSERT(!operand->IsDoubleRegister());
- if (operand->IsConstantOperand()) {
- __ Push(ToHandle(LConstantOperand::cast(operand)));
- } else if (operand->IsRegister()) {
- __ push(ToRegister(operand));
+void LCodeGen::EmitPushConstantOperand(LOperand* operand) {
+ ASSERT(operand->IsConstantOperand());
+ LConstantOperand* const_op = LConstantOperand::cast(operand);
+ Handle<Object> literal = chunk_->LookupLiteral(const_op);
+ Representation r = chunk_->LookupLiteralRepresentation(const_op);
+ if (r.IsInteger32()) {
+ ASSERT(literal->IsNumber());
+ __ push(Immediate(static_cast<int32_t>(literal->Number())));
+ } else if (r.IsDouble()) {
+ Abort("unsupported double immediate");
} else {
- __ push(ToOperand(operand));
+ ASSERT(r.IsTagged());
+ __ Push(literal);
}
}
@@ -3972,8 +3939,20 @@
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
LOperand* obj = instr->object();
LOperand* key = instr->key();
- EmitPushTaggedOperand(obj);
- EmitPushTaggedOperand(key);
+ // Push object.
+ if (obj->IsRegister()) {
+ __ push(ToRegister(obj));
+ } else {
+ __ push(ToOperand(obj));
+ }
+ // Push key.
+ if (key->IsConstantOperand()) {
+ EmitPushConstantOperand(key);
+ } else if (key->IsRegister()) {
+ __ push(ToRegister(key));
+ } else {
+ __ push(ToOperand(key));
+ }
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
LEnvironment* env = instr->deoptimization_environment();
diff --git a/src/x64/lithium-codegen-x64.h b/src/x64/lithium-codegen-x64.h
index 96e0a0f..34277f6 100644
--- a/src/x64/lithium-codegen-x64.h
+++ b/src/x64/lithium-codegen-x64.h
@@ -141,8 +141,8 @@
Register input,
Register temporary);
- int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- int GetParameterCount() const { return scope()->num_parameters(); }
+ int StackSlotCount() const { return chunk()->spill_slot_count(); }
+ int ParameterCount() const { return scope()->num_parameters(); }
void Abort(const char* format, ...);
void Comment(const char* format, ...);
@@ -268,9 +268,8 @@
Handle<Map> type,
Handle<String> name);
- // Emits code for pushing either a tagged constant, a (non-double)
- // register, or a stack slot operand.
- void EmitPushTaggedOperand(LOperand* operand);
+ // Emits code for pushing a constant operand.
+ void EmitPushConstantOperand(LOperand* operand);
struct JumpTableEntry {
explicit inline JumpTableEntry(Address entry)
diff --git a/src/x64/lithium-gap-resolver-x64.cc b/src/x64/lithium-gap-resolver-x64.cc
index c3c617c..cedd025 100644
--- a/src/x64/lithium-gap-resolver-x64.cc
+++ b/src/x64/lithium-gap-resolver-x64.cc
@@ -214,7 +214,7 @@
} else if (source->IsDoubleRegister()) {
XMMRegister src = cgen_->ToDoubleRegister(source);
if (destination->IsDoubleRegister()) {
- __ movaps(cgen_->ToDoubleRegister(destination), src);
+ __ movsd(cgen_->ToDoubleRegister(destination), src);
} else {
ASSERT(destination->IsDoubleStackSlot());
__ movsd(cgen_->ToOperand(destination), src);
@@ -273,9 +273,9 @@
// Swap two double registers.
XMMRegister source_reg = cgen_->ToDoubleRegister(source);
XMMRegister destination_reg = cgen_->ToDoubleRegister(destination);
- __ movaps(xmm0, source_reg);
- __ movaps(source_reg, destination_reg);
- __ movaps(destination_reg, xmm0);
+ __ movsd(xmm0, source_reg);
+ __ movsd(source_reg, destination_reg);
+ __ movsd(destination_reg, xmm0);
} else if (source->IsDoubleRegister() || destination->IsDoubleRegister()) {
// Swap a double register and a double stack slot.
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index 620bbc9..07ca3a5 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -71,21 +71,22 @@
#ifdef DEBUG
void LInstruction::VerifyCall() {
- // Call instructions can use only fixed registers as temporaries and
- // outputs because all registers are blocked by the calling convention.
- // Inputs operands must use a fixed register or use-at-start policy or
- // a non-register policy.
+ // Call instructions can use only fixed registers as
+ // temporaries and outputs because all registers
+ // are blocked by the calling convention.
+ // Inputs must use a fixed register.
ASSERT(Output() == NULL ||
LUnallocated::cast(Output())->HasFixedPolicy() ||
!LUnallocated::cast(Output())->HasRegisterPolicy());
for (UseIterator it(this); it.HasNext(); it.Advance()) {
- LUnallocated* operand = LUnallocated::cast(it.Next());
- ASSERT(operand->HasFixedPolicy() ||
- operand->IsUsedAtStart());
+ LOperand* operand = it.Next();
+ ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
+ !LUnallocated::cast(operand)->HasRegisterPolicy());
}
for (TempIterator it(this); it.HasNext(); it.Advance()) {
- LUnallocated* operand = LUnallocated::cast(it.Next());
- ASSERT(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
+ LOperand* operand = it.Next();
+ ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
+ !LUnallocated::cast(operand)->HasRegisterPolicy());
}
}
#endif
@@ -302,13 +303,6 @@
}
-void LInvokeFunction::PrintDataTo(StringStream* stream) {
- stream->Add("= ");
- InputAt(0)->PrintTo(stream);
- stream->Add(" #%d / ", arity());
-}
-
-
void LCallKeyed::PrintDataTo(StringStream* stream) {
stream->Add("[rcx] #%d / ", arity());
}
@@ -1217,14 +1211,6 @@
}
-LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
- LOperand* function = UseFixed(instr->function(), rdi);
- argument_count_ -= instr->argument_count();
- LInvokeFunction* result = new LInvokeFunction(function);
- return MarkAsCall(DefineFixed(result, rax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
-}
-
-
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
BuiltinFunctionId op = instr->op();
if (op == kMathLog || op == kMathSin || op == kMathCos) {
@@ -1955,13 +1941,6 @@
}
-LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
- LOperand* left = UseOrConstantAtStart(instr->left());
- LOperand* right = UseOrConstantAtStart(instr->right());
- return MarkAsCall(DefineFixed(new LStringAdd(left, right), rax), instr);
-}
-
-
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
LOperand* string = UseRegister(instr->string());
LOperand* index = UseRegisterOrConstant(instr->index());
@@ -2005,8 +1984,7 @@
LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LDeleteProperty* result =
- new LDeleteProperty(UseAtStart(instr->object()),
- UseOrConstantAtStart(instr->key()));
+ new LDeleteProperty(Use(instr->object()), UseOrConstant(instr->key()));
return MarkAsCall(DefineFixed(result, rax), instr);
}
diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h
index 74f4820..15bb894 100644
--- a/src/x64/lithium-x64.h
+++ b/src/x64/lithium-x64.h
@@ -98,15 +98,14 @@
V(GlobalObject) \
V(GlobalReceiver) \
V(Goto) \
- V(HasCachedArrayIndex) \
- V(HasCachedArrayIndexAndBranch) \
V(HasInstanceType) \
V(HasInstanceTypeAndBranch) \
+ V(HasCachedArrayIndex) \
+ V(HasCachedArrayIndexAndBranch) \
V(InstanceOf) \
V(InstanceOfAndBranch) \
V(InstanceOfKnownGlobal) \
V(Integer32ToDouble) \
- V(InvokeFunction) \
V(IsNull) \
V(IsNullAndBranch) \
V(IsObject) \
@@ -153,7 +152,6 @@
V(StoreKeyedSpecializedArrayElement) \
V(StoreNamedField) \
V(StoreNamedGeneric) \
- V(StringAdd) \
V(StringCharCodeAt) \
V(StringCharFromCode) \
V(StringLength) \
@@ -1395,23 +1393,6 @@
};
-class LInvokeFunction: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LInvokeFunction(LOperand* function) {
- inputs_[0] = function;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke-function")
- DECLARE_HYDROGEN_ACCESSOR(InvokeFunction)
-
- LOperand* function() { return inputs_[0]; }
-
- virtual void PrintDataTo(StringStream* stream);
-
- int arity() const { return hydrogen()->argument_count() - 1; }
-};
-
-
class LCallKeyed: public LTemplateInstruction<1, 1, 0> {
public:
explicit LCallKeyed(LOperand* key) {
@@ -1703,21 +1684,6 @@
};
-class LStringAdd: public LTemplateInstruction<1, 2, 0> {
- public:
- LStringAdd(LOperand* left, LOperand* right) {
- inputs_[0] = left;
- inputs_[1] = right;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
- DECLARE_HYDROGEN_ACCESSOR(StringAdd)
-
- LOperand* left() { return inputs_[0]; }
- LOperand* right() { return inputs_[1]; }
-};
-
-
class LStringCharCodeAt: public LTemplateInstruction<1, 2, 0> {
public:
LStringCharCodeAt(LOperand* string, LOperand* index) {
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 3394206..7f027f7 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -650,7 +650,6 @@
Label leave_exit_frame;
Label write_back;
- Factory* factory = isolate()->factory();
ExternalReference next_address =
ExternalReference::handle_scope_next_address();
const int kNextOffset = 0;
@@ -698,7 +697,7 @@
// Check if the function scheduled an exception.
movq(rsi, scheduled_exception_address);
- Cmp(Operand(rsi, 0), factory->the_hole_value());
+ Cmp(Operand(rsi, 0), FACTORY->the_hole_value());
j(not_equal, &promote_scheduled_exception);
LeaveApiExitFrame();
@@ -713,7 +712,7 @@
bind(&empty_result);
// It was zero; the result is undefined.
- Move(rax, factory->undefined_value());
+ Move(rax, FACTORY->undefined_value());
jmp(&prologue);
// HandleScope limit has changed. Delete allocated extensions.
@@ -1248,17 +1247,12 @@
Register src2) {
// No overflow checking. Use only when it's known that
// overflowing is impossible.
+ ASSERT(!dst.is(src2));
if (!dst.is(src1)) {
- if (emit_debug_code()) {
- movq(kScratchRegister, src1);
- addq(kScratchRegister, src2);
- Check(no_overflow, "Smi addition overflow");
- }
- lea(dst, Operand(src1, src2, times_1, 0));
- } else {
- addq(dst, src2);
- Assert(no_overflow, "Smi addition overflow");
+ movq(dst, src1);
}
+ addq(dst, src2);
+ Assert(no_overflow, "Smi addition overflow");
}
@@ -1901,7 +1895,7 @@
Condition is_smi = CheckSmi(object);
j(is_smi, &ok);
Cmp(FieldOperand(object, HeapObject::kMapOffset),
- isolate()->factory()->heap_number_map());
+ FACTORY->heap_number_map());
Assert(equal, "Operand not a number");
bind(&ok);
}
@@ -2158,7 +2152,7 @@
push(kScratchRegister);
if (emit_debug_code()) {
movq(kScratchRegister,
- isolate()->factory()->undefined_value(),
+ FACTORY->undefined_value(),
RelocInfo::EMBEDDED_OBJECT);
cmpq(Operand(rsp, 0), kScratchRegister);
Check(not_equal, "code object not properly patched");
@@ -2326,7 +2320,7 @@
// Check the context is a global context.
if (emit_debug_code()) {
Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
- isolate()->factory()->global_context_map());
+ FACTORY->global_context_map());
Check(equal, "JSGlobalObject::global_context should be a global context.");
}
@@ -2828,7 +2822,7 @@
movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
if (emit_debug_code()) {
Label ok, fail;
- CheckMap(map, isolate()->factory()->meta_map(), &fail, false);
+ CheckMap(map, FACTORY->meta_map(), &fail, false);
jmp(&ok);
bind(&fail);
Abort("Global functions must have initial map");
diff --git a/src/x64/regexp-macro-assembler-x64.cc b/src/x64/regexp-macro-assembler-x64.cc
index d4ccb0e..c16da94 100644
--- a/src/x64/regexp-macro-assembler-x64.cc
+++ b/src/x64/regexp-macro-assembler-x64.cc
@@ -703,7 +703,7 @@
}
-Handle<Object> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
+Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
// Finalize code - write the entry point code now we know how many
// registers we need.
// Entry code:
@@ -972,7 +972,7 @@
code_desc, Code::ComputeFlags(Code::REGEXP),
masm_.CodeObject());
PROFILE(isolate, RegExpCodeCreateEvent(*code, *source));
- return Handle<Object>::cast(code);
+ return Handle<HeapObject>::cast(code);
}
diff --git a/src/x64/regexp-macro-assembler-x64.h b/src/x64/regexp-macro-assembler-x64.h
index a83f8cb..02b510f 100644
--- a/src/x64/regexp-macro-assembler-x64.h
+++ b/src/x64/regexp-macro-assembler-x64.h
@@ -75,7 +75,7 @@
virtual bool CheckSpecialCharacterClass(uc16 type,
Label* on_no_match);
virtual void Fail();
- virtual Handle<Object> GetCode(Handle<String> source);
+ virtual Handle<HeapObject> GetCode(Handle<String> source);
virtual void GoTo(Label* label);
virtual void IfRegisterGE(int reg, int comparand, Label* if_ge);
virtual void IfRegisterLT(int reg, int comparand, Label* if_lt);