Merge V8 5.3.332.45. DO NOT MERGE
Test: Manual
FPIIM-449
Change-Id: Id3254828b068abdea3cb10442e0172a8c9a98e03
(cherry picked from commit 13e2dadd00298019ed862f2b2fc5068bba730bcf)
diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc
index fdf11c1..925ae48 100644
--- a/src/ia32/assembler-ia32.cc
+++ b/src/ia32/assembler-ia32.cc
@@ -191,36 +191,24 @@
return Memory::Address_at(pc_);
}
+Address RelocInfo::wasm_global_reference() {
+ DCHECK(IsWasmGlobalReference(rmode_));
+ return Memory::Address_at(pc_);
+}
+
uint32_t RelocInfo::wasm_memory_size_reference() {
DCHECK(IsWasmMemorySizeReference(rmode_));
return Memory::uint32_at(pc_);
}
-void RelocInfo::update_wasm_memory_reference(
- Address old_base, Address new_base, uint32_t old_size, uint32_t new_size,
- ICacheFlushMode icache_flush_mode) {
- DCHECK(IsWasmMemoryReference(rmode_) || IsWasmMemorySizeReference(rmode_));
- if (IsWasmMemoryReference(rmode_)) {
- Address updated_reference;
- DCHECK(old_base <= wasm_memory_reference() &&
- wasm_memory_reference() < old_base + old_size);
- updated_reference = new_base + (wasm_memory_reference() - old_base);
- DCHECK(new_base <= updated_reference &&
- updated_reference < new_base + new_size);
- Memory::Address_at(pc_) = updated_reference;
- } else if (IsWasmMemorySizeReference(rmode_)) {
- uint32_t updated_size_reference;
- DCHECK(wasm_memory_size_reference() <= old_size);
- updated_size_reference =
- new_size + (wasm_memory_size_reference() - old_size);
- DCHECK(updated_size_reference <= new_size);
- Memory::uint32_at(pc_) = updated_size_reference;
- } else {
- UNREACHABLE();
- }
- if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
- Assembler::FlushICache(isolate_, pc_, sizeof(int32_t));
- }
+void RelocInfo::unchecked_update_wasm_memory_reference(
+ Address address, ICacheFlushMode flush_mode) {
+ Memory::Address_at(pc_) = address;
+}
+
+void RelocInfo::unchecked_update_wasm_memory_size(uint32_t size,
+ ICacheFlushMode flush_mode) {
+ Memory::uint32_at(pc_) = size;
}
// -----------------------------------------------------------------------------
@@ -344,6 +332,8 @@
desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
desc->origin = this;
desc->constant_pool_size = 0;
+ desc->unwinding_info_size = 0;
+ desc->unwinding_info = nullptr;
}
@@ -730,6 +720,33 @@
emit_operand(reg, op);
}
+void Assembler::lock() {
+ EnsureSpace ensure_space(this);
+ EMIT(0xF0);
+}
+
+void Assembler::cmpxchg(const Operand& dst, Register src) {
+ EnsureSpace ensure_space(this);
+ EMIT(0x0F);
+ EMIT(0xB1);
+ emit_operand(src, dst);
+}
+
+void Assembler::cmpxchg_b(const Operand& dst, Register src) {
+ EnsureSpace ensure_space(this);
+ EMIT(0x0F);
+ EMIT(0xB0);
+ emit_operand(src, dst);
+}
+
+void Assembler::cmpxchg_w(const Operand& dst, Register src) {
+ EnsureSpace ensure_space(this);
+ EMIT(0x66);
+ EMIT(0x0F);
+ EMIT(0xB1);
+ emit_operand(src, dst);
+}
+
void Assembler::adc(Register dst, int32_t imm32) {
EnsureSpace ensure_space(this);
emit_arith(2, Operand(dst), Immediate(imm32));
@@ -1516,7 +1533,6 @@
void Assembler::call(Label* L) {
- positions_recorder()->WriteRecordedPositions();
EnsureSpace ensure_space(this);
if (L->is_bound()) {
const int long_size = 5;
@@ -1534,7 +1550,6 @@
void Assembler::call(byte* entry, RelocInfo::Mode rmode) {
- positions_recorder()->WriteRecordedPositions();
EnsureSpace ensure_space(this);
DCHECK(!RelocInfo::IsCodeTarget(rmode));
EMIT(0xE8);
@@ -1553,7 +1568,6 @@
void Assembler::call(const Operand& adr) {
- positions_recorder()->WriteRecordedPositions();
EnsureSpace ensure_space(this);
EMIT(0xFF);
emit_operand(edx, adr);
@@ -1568,7 +1582,6 @@
void Assembler::call(Handle<Code> code,
RelocInfo::Mode rmode,
TypeFeedbackId ast_id) {
- positions_recorder()->WriteRecordedPositions();
EnsureSpace ensure_space(this);
DCHECK(RelocInfo::IsCodeTarget(rmode)
|| rmode == RelocInfo::CODE_AGE_SEQUENCE);
diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h
index c3edacb..4e542d7 100644
--- a/src/ia32/assembler-ia32.h
+++ b/src/ia32/assembler-ia32.h
@@ -124,8 +124,6 @@
Register r = {code};
return r;
}
- const char* ToString();
- bool IsAllocatable() const;
bool is_valid() const { return 0 <= reg_code && reg_code < kNumRegisters; }
bool is(Register reg) const { return reg_code == reg.reg_code; }
int code() const {
@@ -149,6 +147,8 @@
#undef DECLARE_REGISTER
const Register no_reg = {Register::kCode_no_reg};
+static const bool kSimpleFPAliasing = true;
+
struct XMMRegister {
enum Code {
#define REGISTER_CODE(R) kCode_##R,
@@ -165,7 +165,6 @@
return result;
}
- bool IsAllocatable() const;
bool is_valid() const { return 0 <= reg_code && reg_code < kMaxNumRegisters; }
int code() const {
@@ -175,8 +174,6 @@
bool is(XMMRegister reg) const { return reg_code == reg.reg_code; }
- const char* ToString();
-
int reg_code;
};
@@ -662,6 +659,14 @@
void xchg_b(Register reg, const Operand& op);
void xchg_w(Register reg, const Operand& op);
+ // Lock prefix
+ void lock();
+
+ // CompareExchange
+ void cmpxchg(const Operand& dst, Register src);
+ void cmpxchg_b(const Operand& dst, Register src);
+ void cmpxchg_w(const Operand& dst, Register src);
+
// Arithmetics
void adc(Register dst, int32_t imm32);
void adc(Register dst, const Operand& src);
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index 232c56b..96b2787 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -16,10 +16,7 @@
#define __ ACCESS_MASM(masm)
-
-void Builtins::Generate_Adaptor(MacroAssembler* masm,
- CFunctionId id,
- BuiltinExtraArguments extra_args) {
+void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
// ----------- S t a t e -------------
// -- eax : number of arguments excluding receiver
// -- edi : target
@@ -39,19 +36,11 @@
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Insert extra arguments.
- int num_extra_args = 0;
- if (extra_args != BuiltinExtraArguments::kNone) {
- __ PopReturnAddressTo(ecx);
- if (extra_args & BuiltinExtraArguments::kTarget) {
- ++num_extra_args;
- __ Push(edi);
- }
- if (extra_args & BuiltinExtraArguments::kNewTarget) {
- ++num_extra_args;
- __ Push(edx);
- }
- __ PushReturnAddressFrom(ecx);
- }
+ const int num_extra_args = 2;
+ __ PopReturnAddressTo(ecx);
+ __ Push(edi);
+ __ Push(edx);
+ __ PushReturnAddressFrom(ecx);
// JumpToExternalReference expects eax to contain the number of arguments
// including the receiver and the extra arguments.
@@ -395,8 +384,8 @@
__ AssertGeneratorObject(ebx);
// Store input value into generator object.
- __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOffset), eax);
- __ RecordWriteField(ebx, JSGeneratorObject::kInputOffset, eax, ecx,
+ __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
+ __ RecordWriteField(ebx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
kDontSaveFPRegs);
// Store resume mode into generator object.
@@ -407,22 +396,20 @@
__ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
// Flood function if we are stepping.
- Label skip_flooding;
- ExternalReference step_in_enabled =
- ExternalReference::debug_step_in_enabled_address(masm->isolate());
- __ cmpb(Operand::StaticVariable(step_in_enabled), Immediate(0));
- __ j(equal, &skip_flooding);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ Push(ebx);
- __ Push(edx);
- __ Push(edi);
- __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
- __ Pop(edx);
- __ Pop(ebx);
- __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
- }
- __ bind(&skip_flooding);
+ Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
+ Label stepping_prepared;
+ ExternalReference last_step_action =
+ ExternalReference::debug_last_step_action_address(masm->isolate());
+ STATIC_ASSERT(StepFrame > StepIn);
+ __ cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn));
+ __ j(greater_equal, &prepare_step_in_if_stepping);
+
+ // Flood function if we need to continue stepping in the suspended generator.
+ ExternalReference debug_suspended_generator =
+ ExternalReference::debug_suspended_generator_address(masm->isolate());
+ __ cmp(ebx, Operand::StaticVariable(debug_suspended_generator));
+ __ j(equal, &prepare_step_in_suspended_generator);
+ __ bind(&stepping_prepared);
// Pop return address.
__ PopReturnAddressTo(eax);
@@ -518,6 +505,51 @@
__ mov(eax, ebx); // Continuation expects generator object in eax.
__ jmp(edx);
}
+
+ __ bind(&prepare_step_in_if_stepping);
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ Push(ebx);
+ __ Push(edx);
+ __ Push(edi);
+ __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
+ __ Pop(edx);
+ __ Pop(ebx);
+ __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
+ }
+ __ jmp(&stepping_prepared);
+
+ __ bind(&prepare_step_in_suspended_generator);
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ Push(ebx);
+ __ Push(edx);
+ __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
+ __ Pop(edx);
+ __ Pop(ebx);
+ __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
+ }
+ __ jmp(&stepping_prepared);
+}
+
+static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
+ Register scratch2) {
+ Register args_count = scratch1;
+ Register return_pc = scratch2;
+
+ // Get the arguments + reciever count.
+ __ mov(args_count,
+ Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
+ __ mov(args_count,
+ FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
+
+ // Leave the frame (also dropping the register file).
+ __ leave();
+
+ // Drop receiver + arguments.
+ __ pop(return_pc);
+ __ add(esp, args_count);
+ __ push(return_pc);
}
// Generate code for entering a JS function with the interpreter.
@@ -623,18 +655,7 @@
masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
// The return value is in eax.
-
- // Get the arguments + reciever count.
- __ mov(ebx, Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
- __ mov(ebx, FieldOperand(ebx, BytecodeArray::kParameterSizeOffset));
-
- // Leave the frame (also dropping the register file).
- __ leave();
-
- // Drop receiver + arguments and return.
- __ pop(ecx);
- __ add(esp, ebx);
- __ push(ecx);
+ LeaveInterpreterFrame(masm, ebx, ecx);
__ ret(0);
// Load debug copy of the bytecode array.
@@ -661,6 +682,31 @@
__ jmp(ecx);
}
+void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
+ // Save the function and context for call to CompileBaseline.
+ __ mov(edi, Operand(ebp, StandardFrameConstants::kFunctionOffset));
+ __ mov(kContextRegister,
+ Operand(ebp, StandardFrameConstants::kContextOffset));
+
+ // Leave the frame before recompiling for baseline so that we don't count as
+ // an activation on the stack.
+ LeaveInterpreterFrame(masm, ebx, ecx);
+
+ {
+ FrameScope frame_scope(masm, StackFrame::INTERNAL);
+ // Push return value.
+ __ push(eax);
+
+ // Push function as argument and compile for baseline.
+ __ push(edi);
+ __ CallRuntime(Runtime::kCompileBaseline);
+
+ // Restore return value.
+ __ pop(eax);
+ }
+ __ ret(0);
+}
+
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
Register array_limit) {
// ----------- S t a t e -------------
@@ -840,13 +886,30 @@
const int bailout_id = BailoutId::None().ToInt();
__ cmp(temp, Immediate(Smi::FromInt(bailout_id)));
__ j(not_equal, &loop_bottom);
+
// Literals available?
+ Label got_literals, maybe_cleared_weakcell;
__ mov(temp, FieldOperand(map, index, times_half_pointer_size,
SharedFunctionInfo::kOffsetToPreviousLiterals));
+
+ // temp contains either a WeakCell pointing to the literals array or the
+ // literals array directly.
+ STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
+ __ JumpIfSmi(FieldOperand(temp, WeakCell::kValueOffset),
+ &maybe_cleared_weakcell);
+ // The WeakCell value is a pointer, therefore it's a valid literals array.
__ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
- __ JumpIfSmi(temp, &gotta_call_runtime);
+ __ jmp(&got_literals);
+
+ // We have a smi. If it's 0, then we are looking at a cleared WeakCell
+ // around the literals array, and we should visit the runtime. If it's > 0,
+ // then temp already contains the literals array.
+ __ bind(&maybe_cleared_weakcell);
+ __ cmp(FieldOperand(temp, WeakCell::kValueOffset), Immediate(0));
+ __ j(equal, &gotta_call_runtime);
// Save the literals in the closure.
+ __ bind(&got_literals);
__ mov(ecx, Operand(esp, 0));
__ mov(FieldOperand(ecx, JSFunction::kLiteralsOffset), temp);
__ push(index);
@@ -1119,6 +1182,9 @@
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
int field_index) {
// ----------- S t a t e -------------
+ // -- eax : number of arguments
+ // -- edi : function
+ // -- esi : context
// -- esp[0] : return address
// -- esp[4] : receiver
// -----------------------------------
@@ -1161,7 +1227,11 @@
__ bind(&receiver_not_date);
{
FrameScope scope(masm, StackFrame::MANUAL);
- __ EnterFrame(StackFrame::INTERNAL);
+ __ Push(ebp);
+ __ Move(ebp, esp);
+ __ Push(esi);
+ __ Push(edi);
+ __ Push(Immediate(0));
__ CallRuntime(Runtime::kThrowNotDateError);
}
}
@@ -1495,6 +1565,8 @@
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
// ----------- S t a t e -------------
// -- eax : number of arguments
+ // -- edi : function
+ // -- esi : context
// -- esp[0] : return address
// -- esp[(argc - n) * 8] : arg[n] (zero-based)
// -- esp[(argc + 1) * 8] : receiver
@@ -1522,27 +1594,32 @@
__ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
// Load the double value of the parameter into xmm1, maybe converting the
- // parameter to a number first using the ToNumberStub if necessary.
+ // parameter to a number first using the ToNumber builtin if necessary.
Label convert, convert_smi, convert_number, done_convert;
__ bind(&convert);
__ JumpIfSmi(ebx, &convert_smi);
__ JumpIfRoot(FieldOperand(ebx, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex, &convert_number);
{
- // Parameter is not a Number, use the ToNumberStub to convert it.
- FrameScope scope(masm, StackFrame::INTERNAL);
+ // Parameter is not a Number, use the ToNumber builtin to convert it.
+ FrameScope scope(masm, StackFrame::MANUAL);
+ __ Push(ebp);
+ __ Move(ebp, esp);
+ __ Push(esi);
+ __ Push(edi);
__ SmiTag(eax);
__ SmiTag(ecx);
__ Push(eax);
__ Push(ecx);
__ Push(edx);
__ mov(eax, ebx);
- ToNumberStub stub(masm->isolate());
- __ CallStub(&stub);
+ __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
__ mov(ebx, eax);
__ Pop(edx);
__ Pop(ecx);
__ Pop(eax);
+ __ Pop(edi);
+ __ Pop(esi);
{
// Restore the double accumulator value (xmm0).
Label restore_smi, done_restore;
@@ -1557,6 +1634,7 @@
}
__ SmiUntag(ecx);
__ SmiUntag(eax);
+ __ leave();
}
__ jmp(&convert);
__ bind(&convert_number);
@@ -1590,8 +1668,10 @@
// Left and right hand side are equal, check for -0 vs. +0.
__ bind(&compare_equal);
+ __ Push(edi); // Preserve function in edi.
__ movmskpd(edi, reg);
__ test(edi, Immediate(1));
+ __ Pop(edi);
__ j(not_zero, &compare_swap);
__ bind(&done_compare);
@@ -1631,8 +1711,7 @@
}
// 2a. Convert the first argument to a number.
- ToNumberStub stub(masm->isolate());
- __ TailCallStub(&stub);
+ __ Jump(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
// 2b. No arguments, return +0 (already in eax).
__ bind(&no_arguments);
@@ -1682,8 +1761,7 @@
__ Push(edi);
__ Push(edx);
__ Move(eax, ebx);
- ToNumberStub stub(masm->isolate());
- __ CallStub(&stub);
+ __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
__ Move(ebx, eax);
__ Pop(edx);
__ Pop(edi);
@@ -2575,6 +2653,81 @@
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
+// static
+void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
+ // The StringToNumber stub takes one argument in eax.
+ __ AssertString(eax);
+
+ // Check if string has a cached array index.
+ Label runtime;
+ __ test(FieldOperand(eax, String::kHashFieldOffset),
+ Immediate(String::kContainsCachedArrayIndexMask));
+ __ j(not_zero, &runtime, Label::kNear);
+ __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
+ __ IndexFromHash(eax, eax);
+ __ Ret();
+
+ __ bind(&runtime);
+ {
+ FrameScope frame(masm, StackFrame::INTERNAL);
+ // Push argument.
+ __ push(eax);
+ // We cannot use a tail call here because this builtin can also be called
+ // from wasm.
+ __ CallRuntime(Runtime::kStringToNumber);
+ }
+ __ Ret();
+}
+
+// static
+void Builtins::Generate_ToNumber(MacroAssembler* masm) {
+ // The ToNumber stub takes one argument in eax.
+ Label not_smi;
+ __ JumpIfNotSmi(eax, ¬_smi, Label::kNear);
+ __ Ret();
+ __ bind(¬_smi);
+
+ Label not_heap_number;
+ __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
+ __ j(not_equal, ¬_heap_number, Label::kNear);
+ __ Ret();
+ __ bind(¬_heap_number);
+
+ __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
+ RelocInfo::CODE_TARGET);
+}
+
+// static
+void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
+ // The NonNumberToNumber stub takes one argument in eax.
+ __ AssertNotNumber(eax);
+
+ Label not_string;
+ __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
+ // eax: object
+ // edi: object map
+ __ j(above_equal, ¬_string, Label::kNear);
+ __ Jump(masm->isolate()->builtins()->StringToNumber(),
+ RelocInfo::CODE_TARGET);
+ __ bind(¬_string);
+
+ Label not_oddball;
+ __ CmpInstanceType(edi, ODDBALL_TYPE);
+ __ j(not_equal, ¬_oddball, Label::kNear);
+ __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
+ __ Ret();
+ __ bind(¬_oddball);
+ {
+ FrameScope frame(masm, StackFrame::INTERNAL);
+ // Push argument.
+ __ push(eax);
+ // We cannot use a tail call here because this builtin can also be called
+ // from wasm.
+ __ CallRuntime(Runtime::kToNumber);
+ }
+ __ Ret();
+}
+
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : actual number of arguments
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index b711ce9..5761b16 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -22,78 +22,29 @@
namespace v8 {
namespace internal {
+#define __ ACCESS_MASM(masm)
-static void InitializeArrayConstructorDescriptor(
- Isolate* isolate, CodeStubDescriptor* descriptor,
- int constant_stack_parameter_count) {
- // register state
- // eax -- number of arguments
- // edi -- function
- // ebx -- allocation site with elements kind
- Address deopt_handler = Runtime::FunctionForId(
- Runtime::kArrayConstructor)->entry;
-
- if (constant_stack_parameter_count == 0) {
- descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- } else {
- descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- }
+void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
+ __ pop(ecx);
+ __ mov(MemOperand(esp, eax, times_4, 0), edi);
+ __ push(edi);
+ __ push(ebx);
+ __ push(ecx);
+ __ add(eax, Immediate(3));
+ __ TailCallRuntime(Runtime::kNewArray);
}
-
-static void InitializeInternalArrayConstructorDescriptor(
- Isolate* isolate, CodeStubDescriptor* descriptor,
- int constant_stack_parameter_count) {
- // register state
- // eax -- number of arguments
- // edi -- constructor function
- Address deopt_handler = Runtime::FunctionForId(
- Runtime::kInternalArrayConstructor)->entry;
-
- if (constant_stack_parameter_count == 0) {
- descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- } else {
- descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- }
-}
-
-
-void ArraySingleArgumentConstructorStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
-}
-
-
-void ArrayNArgumentsConstructorStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
-}
-
-
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
-void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
+void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
- InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
+ Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
+ descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
-
-void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
-}
-
-
-#define __ ACCESS_MASM(masm)
-
-
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
ExternalReference miss) {
// Update the static counter each time a new code stub is generated.
@@ -680,7 +631,6 @@
&miss, // When not a string.
&miss, // When not a number.
&miss, // When index out of range.
- STRING_INDEX_IS_ARRAY_INDEX,
RECEIVER_IS_STRING);
char_at_generator.GenerateFast(masm);
__ ret(0);
@@ -1469,6 +1419,7 @@
// edi : the function to call
Isolate* isolate = masm->isolate();
Label initialize, done, miss, megamorphic, not_array_function;
+ Label done_increment_count, done_initialize_count;
// Load the cache state into ecx.
__ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
@@ -1481,7 +1432,7 @@
// type-feedback-vector.h).
Label check_allocation_site;
__ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
- __ j(equal, &done, Label::kFar);
+ __ j(equal, &done_increment_count, Label::kFar);
__ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
__ j(equal, &done, Label::kFar);
__ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
@@ -1504,7 +1455,7 @@
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
__ j(not_equal, &megamorphic);
- __ jmp(&done, Label::kFar);
+ __ jmp(&done_increment_count, Label::kFar);
__ bind(&miss);
@@ -1533,11 +1484,25 @@
// slot.
CreateAllocationSiteStub create_stub(isolate);
CallStubInRecordCallTarget(masm, &create_stub);
- __ jmp(&done);
+ __ jmp(&done_initialize_count);
__ bind(¬_array_function);
CreateWeakCellStub weak_cell_stub(isolate);
CallStubInRecordCallTarget(masm, &weak_cell_stub);
+ __ bind(&done_initialize_count);
+
+ // Initialize the call counter.
+ __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
+ FixedArray::kHeaderSize + kPointerSize),
+ Immediate(Smi::FromInt(1)));
+ __ jmp(&done);
+
+ __ bind(&done_increment_count);
+ // Increment the call count for monomorphic function calls.
+ __ add(FieldOperand(ebx, edx, times_half_pointer_size,
+ FixedArray::kHeaderSize + kPointerSize),
+ Immediate(Smi::FromInt(1)));
+
__ bind(&done);
}
@@ -1601,7 +1566,7 @@
// Increment the call count for monomorphic function calls.
__ add(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize + kPointerSize),
- Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));
+ Immediate(Smi::FromInt(1)));
__ mov(ebx, ecx);
__ mov(edx, edi);
@@ -1649,7 +1614,7 @@
// Increment the call count for monomorphic function calls.
__ add(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize + kPointerSize),
- Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));
+ Immediate(Smi::FromInt(1)));
__ bind(&call_function);
__ Set(eax, argc);
@@ -1720,7 +1685,7 @@
// Initialize the call counter.
__ mov(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize + kPointerSize),
- Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));
+ Immediate(Smi::FromInt(1)));
// Store the function. Use a stub since we need a frame for allocation.
// ebx - vector
@@ -1774,7 +1739,7 @@
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
// It is important that the store buffer overflow stubs are generated first.
- ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+ CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
CreateWeakCellStub::GenerateAheadOfTime(isolate);
BinaryOpICStub::GenerateAheadOfTime(isolate);
@@ -2102,13 +2067,7 @@
}
__ push(object_);
__ push(index_); // Consumed by runtime conversion function.
- if (index_flags_ == STRING_INDEX_IS_NUMBER) {
- __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
- } else {
- DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
- // NumberToSmi discards numbers that are not exact integers.
- __ CallRuntime(Runtime::kNumberToSmi);
- }
+ __ CallRuntime(Runtime::kNumberToSmi);
if (!index_.is(eax)) {
// Save the conversion result before the pop instructions below
// have a chance to overwrite it.
@@ -2441,77 +2400,12 @@
// ecx: sub string length (smi)
// edx: from index (smi)
StringCharAtGenerator generator(eax, edx, ecx, eax, &runtime, &runtime,
- &runtime, STRING_INDEX_IS_NUMBER,
- RECEIVER_IS_STRING);
+ &runtime, RECEIVER_IS_STRING);
generator.GenerateFast(masm);
__ ret(3 * kPointerSize);
generator.SkipSlow(masm, &runtime);
}
-
-void ToNumberStub::Generate(MacroAssembler* masm) {
- // The ToNumber stub takes one argument in eax.
- Label not_smi;
- __ JumpIfNotSmi(eax, ¬_smi, Label::kNear);
- __ Ret();
- __ bind(¬_smi);
-
- Label not_heap_number;
- __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
- __ j(not_equal, ¬_heap_number, Label::kNear);
- __ Ret();
- __ bind(¬_heap_number);
-
- NonNumberToNumberStub stub(masm->isolate());
- __ TailCallStub(&stub);
-}
-
-void NonNumberToNumberStub::Generate(MacroAssembler* masm) {
- // The NonNumberToNumber stub takes one argument in eax.
- __ AssertNotNumber(eax);
-
- Label not_string;
- __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
- // eax: object
- // edi: object map
- __ j(above_equal, ¬_string, Label::kNear);
- StringToNumberStub stub(masm->isolate());
- __ TailCallStub(&stub);
- __ bind(¬_string);
-
- Label not_oddball;
- __ CmpInstanceType(edi, ODDBALL_TYPE);
- __ j(not_equal, ¬_oddball, Label::kNear);
- __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
- __ Ret();
- __ bind(¬_oddball);
-
- __ pop(ecx); // Pop return address.
- __ push(eax); // Push argument.
- __ push(ecx); // Push return address.
- __ TailCallRuntime(Runtime::kToNumber);
-}
-
-void StringToNumberStub::Generate(MacroAssembler* masm) {
- // The StringToNumber stub takes one argument in eax.
- __ AssertString(eax);
-
- // Check if string has a cached array index.
- Label runtime;
- __ test(FieldOperand(eax, String::kHashFieldOffset),
- Immediate(String::kContainsCachedArrayIndexMask));
- __ j(not_zero, &runtime, Label::kNear);
- __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
- __ IndexFromHash(eax, eax);
- __ Ret();
-
- __ bind(&runtime);
- __ PopReturnAddressTo(ecx); // Pop return address.
- __ Push(eax); // Push argument.
- __ PushReturnAddressFrom(ecx); // Push return address.
- __ TailCallRuntime(Runtime::kStringToNumber);
-}
-
void ToStringStub::Generate(MacroAssembler* masm) {
// The ToString stub takes one argument in eax.
Label is_number;
@@ -2718,7 +2612,7 @@
// Load ecx with the allocation site. We stick an undefined dummy value here
// and replace it with the real allocation site later when we instantiate this
// stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
- __ mov(ecx, handle(isolate()->heap()->undefined_value()));
+ __ mov(ecx, isolate()->factory()->undefined_value());
// Make sure that we actually patched the allocation site.
if (FLAG_debug_code) {
@@ -3549,14 +3443,14 @@
void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
__ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
- LoadICStub stub(isolate(), state());
+ LoadICStub stub(isolate());
stub.GenerateForTrampoline(masm);
}
void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
__ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
- KeyedLoadICStub stub(isolate(), state());
+ KeyedLoadICStub stub(isolate());
stub.GenerateForTrampoline(masm);
}
@@ -4332,19 +4226,14 @@
}
}
-
-void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
+void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
isolate);
ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
isolate);
- ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
- isolate);
-}
+ ArrayNArgumentsConstructorStub stub(isolate);
+ stub.GetCode();
-
-void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
- Isolate* isolate) {
ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
for (int i = 0; i < 2; i++) {
// For internal arrays we only need a few things
@@ -4352,8 +4241,6 @@
stubh1.GetCode();
InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
stubh2.GetCode();
- InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
- stubh3.GetCode();
}
}
@@ -4373,13 +4260,15 @@
CreateArrayDispatchOneArgument(masm, mode);
__ bind(¬_one_case);
- CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
+ ArrayNArgumentsConstructorStub stub(masm->isolate());
+ __ TailCallStub(&stub);
} else if (argument_count() == NONE) {
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
} else if (argument_count() == ONE) {
CreateArrayDispatchOneArgument(masm, mode);
} else if (argument_count() == MORE_THAN_ONE) {
- CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
+ ArrayNArgumentsConstructorStub stub(masm->isolate());
+ __ TailCallStub(&stub);
} else {
UNREACHABLE();
}
@@ -4491,7 +4380,7 @@
__ TailCallStub(&stub1);
__ bind(¬_one_case);
- InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
+ ArrayNArgumentsConstructorStub stubN(isolate());
__ TailCallStub(&stubN);
}
@@ -4804,8 +4693,11 @@
__ mov(eax, edi);
__ Ret();
- // Fall back to %AllocateInNewSpace.
+ // Fall back to %AllocateInNewSpace (if not too big).
+ Label too_big_for_new_space;
__ bind(&allocate);
+ __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
+ __ j(greater, &too_big_for_new_space);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(ecx);
@@ -4818,6 +4710,22 @@
__ Pop(eax);
}
__ jmp(&done_allocate);
+
+ // Fall back to %NewRestParameter.
+ __ bind(&too_big_for_new_space);
+ __ PopReturnAddressTo(ecx);
+ // We reload the function from the caller frame due to register pressure
+ // within this stub. This is the slow path, hence reloading is preferable.
+ if (skip_stub_frame()) {
+ // For Ignition we need to skip the handler/stub frame to reach the
+ // JavaScript frame for the function.
+ __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
+ } else {
+ __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
+ }
+ __ PushReturnAddressFrom(ecx);
+ __ TailCallRuntime(Runtime::kNewRestParameter);
}
}
@@ -5174,8 +5082,11 @@
__ mov(eax, edi);
__ Ret();
- // Fall back to %AllocateInNewSpace.
+ // Fall back to %AllocateInNewSpace (if not too big).
+ Label too_big_for_new_space;
__ bind(&allocate);
+ __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
+ __ j(greater, &too_big_for_new_space);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(ecx);
@@ -5188,37 +5099,22 @@
__ Pop(eax);
}
__ jmp(&done_allocate);
-}
-
-void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
- Register context_reg = esi;
- Register slot_reg = ebx;
- Register result_reg = eax;
- Label slow_case;
-
- // Go up context chain to the script context.
- for (int i = 0; i < depth(); ++i) {
- __ mov(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
- context_reg = result_reg;
+ // Fall back to %NewStrictArguments.
+ __ bind(&too_big_for_new_space);
+ __ PopReturnAddressTo(ecx);
+ // We reload the function from the caller frame due to register pressure
+ // within this stub. This is the slow path, hence reloading is preferable.
+ if (skip_stub_frame()) {
+ // For Ignition we need to skip the handler/stub frame to reach the
+ // JavaScript frame for the function.
+ __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
+ } else {
+ __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
}
-
- // Load the PropertyCell value at the specified slot.
- __ mov(result_reg, ContextOperand(context_reg, slot_reg));
- __ mov(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));
-
- // Check that value is not the_hole.
- __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
- __ j(equal, &slow_case, Label::kNear);
- __ Ret();
-
- // Fallback to the runtime.
- __ bind(&slow_case);
- __ SmiTag(slot_reg);
- __ Pop(result_reg); // Pop return address.
- __ Push(slot_reg);
- __ Push(result_reg); // Push return address.
- __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
+ __ PushReturnAddressFrom(ecx);
+ __ TailCallRuntime(Runtime::kNewStrictArguments);
}
diff --git a/src/ia32/code-stubs-ia32.h b/src/ia32/code-stubs-ia32.h
index fc813f5..c1878f0 100644
--- a/src/ia32/code-stubs-ia32.h
+++ b/src/ia32/code-stubs-ia32.h
@@ -301,8 +301,8 @@
Register r2,
Register r3) {
for (int i = 0; i < Register::kNumRegisters; i++) {
- Register candidate = Register::from_code(i);
- if (candidate.IsAllocatable()) {
+ if (RegisterConfiguration::Crankshaft()->IsAllocatableGeneralCode(i)) {
+ Register candidate = Register::from_code(i);
if (candidate.is(ecx)) continue;
if (candidate.is(r1)) continue;
if (candidate.is(r2)) continue;
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index 36c83cc..18e5364 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -34,43 +34,6 @@
#define __ masm.
-UnaryMathFunctionWithIsolate CreateExpFunction(Isolate* isolate) {
- size_t actual_size;
- byte* buffer =
- static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
- if (buffer == nullptr) return nullptr;
- ExternalReference::InitializeMathExpData();
-
- MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
- CodeObjectRequired::kNo);
- // esp[1 * kPointerSize]: raw double input
- // esp[0 * kPointerSize]: return address
- {
- XMMRegister input = xmm1;
- XMMRegister result = xmm2;
- __ movsd(input, Operand(esp, 1 * kPointerSize));
- __ push(eax);
- __ push(ebx);
-
- MathExpGenerator::EmitMathExp(&masm, input, result, xmm0, eax, ebx);
-
- __ pop(ebx);
- __ pop(eax);
- __ movsd(Operand(esp, 1 * kPointerSize), result);
- __ fld_d(Operand(esp, 1 * kPointerSize));
- __ Ret();
- }
-
- CodeDesc desc;
- masm.GetCode(&desc);
- DCHECK(!RelocInfo::RequiresRelocation(desc));
-
- Assembler::FlushICache(isolate, buffer, actual_size);
- base::OS::ProtectCode(buffer, actual_size);
- return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
-}
-
-
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
size_t actual_size;
// Allocate buffer in executable space.
@@ -580,6 +543,7 @@
__ push(eax);
__ push(ebx);
+ __ push(esi);
__ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset));
@@ -620,8 +584,9 @@
// Call into runtime if GC is required.
__ bind(&gc_required);
+
// Restore registers before jumping into runtime.
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ __ pop(esi);
__ pop(ebx);
__ pop(eax);
__ jmp(fail);
@@ -656,12 +621,11 @@
__ sub(edi, Immediate(Smi::FromInt(1)));
__ j(not_sign, &loop);
+ // Restore registers.
+ __ pop(esi);
__ pop(ebx);
__ pop(eax);
- // Restore esi.
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
-
__ bind(&only_change_map);
// eax: value
// ebx: target map
@@ -927,64 +891,6 @@
__ bind(&done);
}
-
-static Operand ExpConstant(int index) {
- return Operand::StaticVariable(ExternalReference::math_exp_constants(index));
-}
-
-
-void MathExpGenerator::EmitMathExp(MacroAssembler* masm,
- XMMRegister input,
- XMMRegister result,
- XMMRegister double_scratch,
- Register temp1,
- Register temp2) {
- DCHECK(!input.is(double_scratch));
- DCHECK(!input.is(result));
- DCHECK(!result.is(double_scratch));
- DCHECK(!temp1.is(temp2));
- DCHECK(ExternalReference::math_exp_constants(0).address() != NULL);
- DCHECK(!masm->serializer_enabled()); // External references not serializable.
-
- Label done;
-
- __ movsd(double_scratch, ExpConstant(0));
- __ xorpd(result, result);
- __ ucomisd(double_scratch, input);
- __ j(above_equal, &done);
- __ ucomisd(input, ExpConstant(1));
- __ movsd(result, ExpConstant(2));
- __ j(above_equal, &done);
- __ movsd(double_scratch, ExpConstant(3));
- __ movsd(result, ExpConstant(4));
- __ mulsd(double_scratch, input);
- __ addsd(double_scratch, result);
- __ movd(temp2, double_scratch);
- __ subsd(double_scratch, result);
- __ movsd(result, ExpConstant(6));
- __ mulsd(double_scratch, ExpConstant(5));
- __ subsd(double_scratch, input);
- __ subsd(result, double_scratch);
- __ movsd(input, double_scratch);
- __ mulsd(input, double_scratch);
- __ mulsd(result, input);
- __ mov(temp1, temp2);
- __ mulsd(result, ExpConstant(7));
- __ subsd(result, double_scratch);
- __ add(temp1, Immediate(0x1ff800));
- __ addsd(result, ExpConstant(8));
- __ and_(temp2, Immediate(0x7ff));
- __ shr(temp1, 11);
- __ shl(temp1, 20);
- __ movd(input, temp1);
- __ pshufd(input, input, static_cast<uint8_t>(0xe1)); // Order: 11 10 00 01
- __ movsd(double_scratch, Operand::StaticArray(
- temp2, times_8, ExternalReference::math_exp_log_table()));
- __ orps(input, double_scratch);
- __ mulsd(result, input);
- __ bind(&done);
-}
-
#undef __
diff --git a/src/ia32/codegen-ia32.h b/src/ia32/codegen-ia32.h
index 133b1ad..46468e9 100644
--- a/src/ia32/codegen-ia32.h
+++ b/src/ia32/codegen-ia32.h
@@ -29,19 +29,6 @@
};
-class MathExpGenerator : public AllStatic {
- public:
- static void EmitMathExp(MacroAssembler* masm,
- XMMRegister input,
- XMMRegister result,
- XMMRegister double_scratch,
- Register temp1,
- Register temp2);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MathExpGenerator);
-};
-
} // namespace internal
} // namespace v8
diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc
index 656d3e9..c14a2a1 100644
--- a/src/ia32/deoptimizer-ia32.cc
+++ b/src/ia32/deoptimizer-ia32.cc
@@ -196,8 +196,7 @@
const int kDoubleRegsSize = kDoubleSize * XMMRegister::kMaxNumRegisters;
__ sub(esp, Immediate(kDoubleRegsSize));
- const RegisterConfiguration* config =
- RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
+ const RegisterConfiguration* config = RegisterConfiguration::Crankshaft();
for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
int code = config->GetAllocatableDoubleCode(i);
XMMRegister xmm_reg = XMMRegister::from_code(code);
diff --git a/src/ia32/disasm-ia32.cc b/src/ia32/disasm-ia32.cc
index 8a1b3b5..be3530c 100644
--- a/src/ia32/disasm-ia32.cc
+++ b/src/ia32/disasm-ia32.cc
@@ -1232,6 +1232,10 @@
return "shrd"; // 3-operand version.
case 0xAB:
return "bts";
+ case 0xB0:
+ return "cmpxchg_b";
+ case 0xB1:
+ return "cmpxchg";
case 0xBC:
return "bsf";
case 0xBD:
@@ -1264,6 +1268,9 @@
vex_byte0_ = *data;
vex_byte1_ = *(data + 1);
data += 2;
+ } else if (*data == 0xF0 /*lock*/) {
+ AppendToBuffer("lock ");
+ data++;
}
bool processed = true; // Will be set to false if the current instruction
@@ -1496,6 +1503,18 @@
} else {
AppendToBuffer(",%s,cl", NameOfCPURegister(regop));
}
+ } else if (f0byte == 0xB0) {
+ // cmpxchg_b
+ data += 2;
+ AppendToBuffer("%s ", f0mnem);
+ int mod, regop, rm;
+ get_modrm(*data, &mod, &regop, &rm);
+ data += PrintRightOperand(data);
+ AppendToBuffer(",%s", NameOfByteCPURegister(regop));
+ } else if (f0byte == 0xB1) {
+ // cmpxchg
+ data += 2;
+ data += PrintOperands(f0mnem, OPER_REG_OP_ORDER, data);
} else if (f0byte == 0xBC) {
data += 2;
int mod, regop, rm;
@@ -1620,9 +1639,8 @@
data++;
int mod, regop, rm;
get_modrm(*data, &mod, &regop, &rm);
- AppendToBuffer("xchg_w ");
+ AppendToBuffer("xchg_w %s,", NameOfCPURegister(regop));
data += PrintRightOperand(data);
- AppendToBuffer(",%s", NameOfCPURegister(regop));
} else if (*data == 0x89) {
data++;
int mod, regop, rm;
@@ -1897,6 +1915,9 @@
NameOfXMMRegister(regop),
NameOfXMMRegister(rm));
data++;
+ } else if (*data == 0xB1) {
+ data++;
+ data += PrintOperands("cmpxchg_w", OPER_REG_OP_ORDER, data);
} else {
UnimplementedInstruction();
}
@@ -2227,7 +2248,7 @@
const char* NameConverter::NameOfAddress(byte* addr) const {
- v8::internal::SNPrintF(tmp_buffer_, "%p", addr);
+ v8::internal::SNPrintF(tmp_buffer_, "%p", static_cast<void*>(addr));
return tmp_buffer_.start();
}
@@ -2290,7 +2311,7 @@
buffer[0] = '\0';
byte* prev_pc = pc;
pc += d.InstructionDecode(buffer, pc);
- fprintf(f, "%p", prev_pc);
+ fprintf(f, "%p", static_cast<void*>(prev_pc));
fprintf(f, " ");
for (byte* bp = prev_pc; bp < pc; bp++) {
diff --git a/src/ia32/interface-descriptors-ia32.cc b/src/ia32/interface-descriptors-ia32.cc
index 8a877b1..98259c7 100644
--- a/src/ia32/interface-descriptors-ia32.cc
+++ b/src/ia32/interface-descriptors-ia32.cc
@@ -11,6 +11,14 @@
const Register CallInterfaceDescriptor::ContextRegister() { return esi; }
+void CallInterfaceDescriptor::DefaultInitializePlatformSpecific(
+ CallInterfaceDescriptorData* data, int register_parameter_count) {
+ const Register default_stub_registers[] = {eax, ebx, ecx, edx, edi};
+ CHECK_LE(static_cast<size_t>(register_parameter_count),
+ arraysize(default_stub_registers));
+ data->InitializePlatformSpecific(register_parameter_count,
+ default_stub_registers);
+}
const Register LoadDescriptor::ReceiverRegister() { return edx; }
const Register LoadDescriptor::NameRegister() { return ecx; }
@@ -44,9 +52,6 @@
const Register StoreTransitionDescriptor::MapRegister() { return ebx; }
-const Register LoadGlobalViaContextDescriptor::SlotRegister() { return ebx; }
-
-
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return ebx; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return eax; }
@@ -68,8 +73,6 @@
const Register GrowArrayElementsDescriptor::ObjectRegister() { return eax; }
const Register GrowArrayElementsDescriptor::KeyRegister() { return ebx; }
-const Register HasPropertyDescriptor::ObjectRegister() { return eax; }
-const Register HasPropertyDescriptor::KeyRegister() { return ebx; }
void FastNewClosureDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
@@ -259,43 +262,27 @@
data->InitializePlatformSpecific(arraysize(registers), registers, NULL);
}
-void ArrayConstructorConstantArgCountDescriptor::InitializePlatformSpecific(
+void ArraySingleArgumentConstructorDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// register state
// eax -- number of arguments
// edi -- function
// ebx -- allocation site with elements kind
- Register registers[] = {edi, ebx};
+ Register registers[] = {edi, ebx, eax};
data->InitializePlatformSpecific(arraysize(registers), registers, NULL);
}
-
-void ArrayConstructorDescriptor::InitializePlatformSpecific(
+void ArrayNArgumentsConstructorDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
- // stack param count needs (constructor pointer, and single argument)
- Register registers[] = {edi, ebx, eax};
- data->InitializePlatformSpecific(arraysize(registers), registers);
-}
-
-
-void InternalArrayConstructorConstantArgCountDescriptor::
- InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
// register state
// eax -- number of arguments
// edi -- function
- Register registers[] = {edi};
+ // ebx -- allocation site with elements kind
+ Register registers[] = {edi, ebx, eax};
data->InitializePlatformSpecific(arraysize(registers), registers, NULL);
}
-
-void InternalArrayConstructorDescriptor::InitializePlatformSpecific(
- CallInterfaceDescriptorData* data) {
- // stack param count needs (constructor pointer, and single argument)
- Register registers[] = {edi, eax};
- data->InitializePlatformSpecific(arraysize(registers), registers);
-}
-
-void FastArrayPushDescriptor::InitializePlatformSpecific(
+void VarArgFunctionDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// stack param count needs (arg count)
Register registers[] = {eax};
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 08189e2..25a0a95 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -1095,8 +1095,8 @@
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
- mov(vector, FieldOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
+ mov(vector, FieldOperand(vector, JSFunction::kLiteralsOffset));
+ mov(vector, FieldOperand(vector, LiteralsArray::kFeedbackVectorOffset));
}
@@ -2362,10 +2362,11 @@
const ParameterCount& expected,
const ParameterCount& actual) {
Label skip_flooding;
- ExternalReference step_in_enabled =
- ExternalReference::debug_step_in_enabled_address(isolate());
- cmpb(Operand::StaticVariable(step_in_enabled), Immediate(0));
- j(equal, &skip_flooding);
+ ExternalReference last_step_action =
+ ExternalReference::debug_last_step_action_address(isolate());
+ STATIC_ASSERT(StepFrame > StepIn);
+ cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn));
+ j(less, &skip_flooding);
{
FrameScope frame(this,
has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
@@ -2678,7 +2679,7 @@
void MacroAssembler::Move(Register dst, const Immediate& x) {
- if (x.is_zero()) {
+ if (x.is_zero() && RelocInfo::IsNone(x.rmode_)) {
xor_(dst, dst); // Shorter than mov of 32-bit immediate 0.
} else {
mov(dst, x);