// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "debug.h"
namespace v8 {
namespace internal {
MacroAssembler::MacroAssembler(void* buffer, int size)
: Assembler(buffer, size),
unresolved_(0),
generating_stub_(false),
allow_stub_calls_(true),
code_object_(Heap::undefined_value()) {
}
// TODO(x64): For now, the write barrier is disabled on x64 and we
// therefore generate no code. This should be fixed when the write
// barrier is enabled.
void MacroAssembler::RecordWrite(Register object, int offset,
Register value, Register scratch) {
}
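
// Assert checks the given condition only when --debug-code is enabled;
// Check does so unconditionally and aborts with |msg| if the condition
// does not hold.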
void MacroAssembler::Assert(Condition cc, const char* msg) {
if (FLAG_debug_code) Check(cc, msg);
}
void MacroAssembler::Check(Condition cc, const char* msg) {
Label L;
j(cc, &L);
Abort(msg);
// will not return here
bind(&L);
}
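
// Jump to |then_label| if |result| is zero but was produced from a
// negative operand |op|; the code generator uses this to detect results
// that should have been the floating-point value -0.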
void MacroAssembler::NegativeZeroTest(Register result,
Register op,
Label* then_label) {
Label ok;
testq(result, result);
j(not_zero, &ok);
testq(op, op);
j(sign, then_label);
bind(&ok);
}
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string as a smi to avoid GC problems.
  // However, msg is not guaranteed to be properly aligned, so instead we
  // pass an aligned pointer that is a proper v8 smi, along with the
  // alignment difference from the real pointer, also encoded as a smi.
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
// Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
if (msg != NULL) {
RecordComment("Abort message: ");
RecordComment(msg);
}
#endif
push(rax);
movq(kScratchRegister, p0, RelocInfo::NONE);
push(kScratchRegister);
movq(kScratchRegister,
reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)),
RelocInfo::NONE);
push(kScratchRegister);
CallRuntime(Runtime::kAbort, 2);
// will not return here
}
void MacroAssembler::CallStub(CodeStub* stub) {
ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
movq(kScratchRegister, stub->GetCode(), RelocInfo::CODE_TARGET);
call(kScratchRegister);
}
void MacroAssembler::StubReturn(int argc) {
ASSERT(argc >= 1 && generating_stub());
ret((argc - 1) * kPointerSize);
}
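
// Remove the arguments of an aborted call from the stack and put
// undefined into rax, the return-value register.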
void MacroAssembler::IllegalOperation(int num_arguments) {
if (num_arguments > 0) {
addq(rsp, Immediate(num_arguments * kPointerSize));
}
movq(rax, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT);
}
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
if (f->nargs >= 0 && f->nargs != num_arguments) {
IllegalOperation(num_arguments);
return;
}
Runtime::FunctionId function_id =
static_cast<Runtime::FunctionId>(f->stub_id);
RuntimeStub stub(function_id, num_arguments);
CallStub(&stub);
}
void MacroAssembler::TailCallRuntime(ExternalReference const& ext,
int num_arguments) {
// TODO(1236192): Most runtime routines don't need the number of
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
// smarter.
movq(rax, Immediate(num_arguments));
JumpToBuiltin(ext);
}
void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
// Set the entry point and jump to the C entry runtime stub.
movq(rbx, ext);
CEntryStub ces;
movq(kScratchRegister, ces.GetCode(), RelocInfo::CODE_TARGET);
jmp(kScratchRegister);
}
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
bool resolved;
Handle<Code> code = ResolveBuiltin(id, &resolved);
const char* name = Builtins::GetName(id);
int argc = Builtins::GetArgumentsCount(id);
movq(target, code, RelocInfo::EMBEDDED_OBJECT);
if (!resolved) {
uint32_t flags =
Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
Bootstrapper::FixupFlagsUseCodeObject::encode(true);
Unresolved entry = { pc_offset() - sizeof(intptr_t), flags, name };
unresolved_.Add(entry);
}
addq(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
}
Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
bool* resolved) {
// Move the builtin function into the temporary function slot by
// reading it from the builtins object. NOTE: We should be able to
// reduce this to two instructions by putting the function table in
// the global object instead of the "builtins" object and by using a
// real register for the function.
movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
movq(rdx, FieldOperand(rdx, GlobalObject::kBuiltinsOffset));
int builtins_offset =
JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
movq(rdi, FieldOperand(rdx, builtins_offset));
return Builtins::GetCode(id, resolved);
}
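
// Set stores a 64-bit immediate using a short encoding where one is safe:
// xor for zero, a sign-extended 32-bit immediate when the value fits in
// int32, and (for the register variant) a zero-extending 32-bit move when
// it fits in uint32. Illustrative uses:
//   masm->Set(rcx, 0);           // xorq rcx, rcx
//   masm->Set(rcx, -1);          // movq rcx, -1  (sign-extended imm32)
//   masm->Set(rcx, 0x80000000);  // movl rcx, 0x80000000 (zero-extends)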
void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xor_(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    // A 32-bit move to a register implicitly zero-extends into the upper
    // 32 bits, so a movl suffices for values that fit in uint32.
    movl(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}
void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (x == 0) {
    xor_(kScratchRegister, kScratchRegister);
    movq(dst, kScratchRegister);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    // Unlike a 32-bit move to a register, a 32-bit store does not
    // zero-extend the upper half of a 64-bit memory slot, so values that
    // do not fit in a sign-extended 32-bit immediate must go through the
    // scratch register.
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}
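
// On x64 every smi can currently be embedded in code directly, so
// IsUnsafeSmi always answers false and LoadUnsafeSmi is unreachable
// (hence UNIMPLEMENTED).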
bool MacroAssembler::IsUnsafeSmi(Smi* value) {
return false;
}
void MacroAssembler::LoadUnsafeSmi(Register dst, Smi* source) {
UNIMPLEMENTED();
}
void MacroAssembler::Move(Register dst, Handle<Object> source) {
ASSERT(!source->IsFailure());
if (source->IsSmi()) {
if (IsUnsafeSmi(source)) {
LoadUnsafeSmi(dst, source);
} else {
int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
movq(dst, Immediate(smi));
}
} else {
movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
}
}
void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
if (source->IsSmi()) {
int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
movq(dst, Immediate(smi));
} else {
movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
movq(dst, kScratchRegister);
}
}
void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Move(kScratchRegister, source);
cmpq(dst, kScratchRegister);
}
void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
if (source->IsSmi()) {
if (IsUnsafeSmi(source)) {
LoadUnsafeSmi(kScratchRegister, source);
cmpl(dst, kScratchRegister);
} else {
// For smi-comparison, it suffices to compare the low 32 bits.
int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
cmpl(dst, Immediate(smi));
}
} else {
ASSERT(source->IsHeapObject());
movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
cmpq(dst, kScratchRegister);
}
}
void MacroAssembler::Push(Handle<Object> source) {
if (source->IsSmi()) {
if (IsUnsafeSmi(source)) {
LoadUnsafeSmi(kScratchRegister, source);
push(kScratchRegister);
} else {
int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
push(Immediate(smi));
}
} else {
ASSERT(source->IsHeapObject());
movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
push(kScratchRegister);
}
}
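
// x64 has no direct 64-bit jump or call instruction, so the Jump and
// Call variants below load the destination into kScratchRegister and
// branch through it.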
void MacroAssembler::Jump(ExternalReference ext) {
movq(kScratchRegister, ext);
jmp(kScratchRegister);
}
void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
movq(kScratchRegister, destination, rmode);
jmp(kScratchRegister);
}
void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
WriteRecordedPositions();
ASSERT(RelocInfo::IsCodeTarget(rmode));
movq(kScratchRegister, code_object, rmode);
#ifdef DEBUG
Label target;
bind(&target);
#endif
jmp(kScratchRegister);
#ifdef DEBUG
ASSERT_EQ(kTargetAddrToReturnAddrDist,
SizeOfCodeGeneratedSince(&target) + kPointerSize);
#endif
}
void MacroAssembler::Call(ExternalReference ext) {
movq(kScratchRegister, ext);
call(kScratchRegister);
}
void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
movq(kScratchRegister, destination, rmode);
call(kScratchRegister);
}
void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
WriteRecordedPositions();
ASSERT(RelocInfo::IsCodeTarget(rmode));
movq(kScratchRegister, code_object, rmode);
#ifdef DEBUG
Label target;
bind(&target);
#endif
call(kScratchRegister);
#ifdef DEBUG
ASSERT_EQ(kTargetAddrToReturnAddrDist,
SizeOfCodeGeneratedSince(&target) + kPointerSize);
#endif
}
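
// PushTryHandler leaves the handler on the stack in the layout given by
// StackHandlerConstants (lowest address first):
//   rsp[0]: next handler (link to the previous handler)
//   rsp[1]: frame pointer (NULL for a JS entry handler)
//   rsp[2]: state (TRY_CATCH, TRY_FINALLY or ENTRY)
//   rsp[3]: return address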
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type) {
// Adjust this code if not the case.
ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
// The pc (return address) is already on TOS. This code pushes state,
// frame pointer and current handler. Check that they are expected
// next on the stack, in that order.
ASSERT_EQ(StackHandlerConstants::kStateOffset,
StackHandlerConstants::kPCOffset - kPointerSize);
ASSERT_EQ(StackHandlerConstants::kFPOffset,
StackHandlerConstants::kStateOffset - kPointerSize);
ASSERT_EQ(StackHandlerConstants::kNextOffset,
StackHandlerConstants::kFPOffset - kPointerSize);
if (try_location == IN_JAVASCRIPT) {
if (type == TRY_CATCH_HANDLER) {
push(Immediate(StackHandler::TRY_CATCH));
} else {
push(Immediate(StackHandler::TRY_FINALLY));
}
push(rbp);
} else {
ASSERT(try_location == IN_JS_ENTRY);
// The frame pointer does not point to a JS frame so we save NULL
// for rbp. We expect the code throwing an exception to check rbp
// before dereferencing it to restore the context.
push(Immediate(StackHandler::ENTRY));
push(Immediate(0)); // NULL frame pointer.
}
// Save the current handler.
movq(kScratchRegister, ExternalReference(Top::k_handler_address));
push(Operand(kScratchRegister, 0));
// Link this handler.
movq(Operand(kScratchRegister, 0), rsp);
}
void MacroAssembler::Ret() {
ret(0);
}
void MacroAssembler::FCmp() {
fcompp();
push(rax);
fnstsw_ax();
// TODO(X64): Check that sahf is safe to use, using CPUProbe.
sahf();
pop(rax);
}
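
// CmpObjectType loads the map of |heap_object| into |map| and compares
// the map's instance type against |type|; callers branch on the
// resulting condition codes.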
void MacroAssembler::CmpObjectType(Register heap_object,
InstanceType type,
Register map) {
movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
CmpInstanceType(map, type);
}
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
Immediate(static_cast<int8_t>(type)));
}
void MacroAssembler::TryGetFunctionPrototype(Register function,
Register result,
Label* miss) {
// Check that the receiver isn't a smi.
testl(function, Immediate(kSmiTagMask));
j(zero, miss);
// Check that the function really is a function.
CmpObjectType(function, JS_FUNCTION_TYPE, result);
j(not_equal, miss);
// Make sure that the function has an instance prototype.
Label non_instance;
testb(FieldOperand(result, Map::kBitFieldOffset),
Immediate(1 << Map::kHasNonInstancePrototype));
j(not_zero, &non_instance);
// Get the prototype or initial map from the function.
movq(result,
FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
// If the prototype or initial map is the hole, don't return it and
// simply miss the cache instead. This will allow us to allocate a
// prototype object on-demand in the runtime system.
Cmp(result, Factory::the_hole_value());
j(equal, miss);
// If the function does not have an initial map, we're done.
Label done;
CmpObjectType(result, MAP_TYPE, kScratchRegister);
j(not_equal, &done);
// Get the prototype from the initial map.
movq(result, FieldOperand(result, Map::kPrototypeOffset));
jmp(&done);
// Non-instance prototype: Fetch prototype from constructor field
// in initial map.
bind(&non_instance);
movq(result, FieldOperand(result, Map::kConstructorOffset));
// All done.
bind(&done);
}
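
// The StatsCounter helpers below touch their counter cell only when
// native code counters are enabled and the counter itself is active.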
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
if (FLAG_native_code_counters && counter->Enabled()) {
movq(kScratchRegister, ExternalReference(counter));
movl(Operand(kScratchRegister, 0), Immediate(value));
}
}
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
ASSERT(value > 0);
if (FLAG_native_code_counters && counter->Enabled()) {
movq(kScratchRegister, ExternalReference(counter));
Operand operand(kScratchRegister, 0);
if (value == 1) {
incl(operand);
} else {
addl(operand, Immediate(value));
}
}
}
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
ASSERT(value > 0);
if (FLAG_native_code_counters && counter->Enabled()) {
movq(kScratchRegister, ExternalReference(counter));
Operand operand(kScratchRegister, 0);
if (value == 1) {
decl(operand);
} else {
subl(operand, Immediate(value));
}
}
}
#ifdef ENABLE_DEBUGGER_SUPPORT
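// The routines below shuffle the JS caller-saved registers between the
// stack and their per-register debugger memory locations
// (Debug_Address::Register(i)); the debugger relies on them to support
// nested break points.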
void MacroAssembler::PushRegistersFromMemory(RegList regs) {
ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the contents of the memory locations onto the stack.
for (int i = 0; i < kNumJSCallerSaved; i++) {
int r = JSCallerSavedCode(i);
if ((regs & (1 << r)) != 0) {
ExternalReference reg_addr =
ExternalReference(Debug_Address::Register(i));
movq(kScratchRegister, reg_addr);
push(Operand(kScratchRegister, 0));
}
}
}
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the registers to their memory locations.
for (int i = 0; i < kNumJSCallerSaved; i++) {
int r = JSCallerSavedCode(i);
if ((regs & (1 << r)) != 0) {
Register reg = { r };
ExternalReference reg_addr =
ExternalReference(Debug_Address::Register(i));
movq(kScratchRegister, reg_addr);
movq(Operand(kScratchRegister, 0), reg);
}
}
}
void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the memory locations to the registers.
for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
int r = JSCallerSavedCode(i);
if ((regs & (1 << r)) != 0) {
Register reg = { r };
ExternalReference reg_addr =
ExternalReference(Debug_Address::Register(i));
movq(kScratchRegister, reg_addr);
movq(reg, Operand(kScratchRegister, 0));
}
}
}
void MacroAssembler::PopRegistersToMemory(RegList regs) {
ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the contents from the stack to the memory locations.
for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
int r = JSCallerSavedCode(i);
if ((regs & (1 << r)) != 0) {
ExternalReference reg_addr =
ExternalReference(Debug_Address::Register(i));
movq(kScratchRegister, reg_addr);
pop(Operand(kScratchRegister, 0));
}
}
}
void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
Register scratch,
RegList regs) {
ASSERT(!scratch.is(kScratchRegister));
ASSERT(!base.is(kScratchRegister));
ASSERT(!base.is(scratch));
ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the stack to the memory locations and adjust base.
for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
int r = JSCallerSavedCode(i);
if ((regs & (1 << r)) != 0) {
movq(scratch, Operand(base, 0));
ExternalReference reg_addr =
ExternalReference(Debug_Address::Register(i));
movq(kScratchRegister, reg_addr);
movq(Operand(kScratchRegister, 0), scratch);
lea(base, Operand(base, kPointerSize));
}
}
}
#endif // ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
bool resolved;
Handle<Code> code = ResolveBuiltin(id, &resolved);
// Calls are not allowed in some stubs.
ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
ParameterCount expected(0);
InvokeCode(Handle<Code>(code), expected, expected,
RelocInfo::CODE_TARGET, flag);
const char* name = Builtins::GetName(id);
int argc = Builtins::GetArgumentsCount(id);
  // The target address for the call is stored as an immediate
  // kTargetAddrToReturnAddrDist bytes before the return address.
if (!resolved) {
uint32_t flags =
Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
Bootstrapper::FixupFlagsUseCodeObject::encode(false);
Unresolved entry =
{ pc_offset() - kTargetAddrToReturnAddrDist, flags, name };
unresolved_.Add(entry);
}
}
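
// InvokePrologue implements the argument-count check shared by the
// Invoke variants: the actual count ends up in rax and the expected
// count in rbx, and whenever the two may differ the invocation is
// routed through the arguments adaptor trampoline.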
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
const ParameterCount& actual,
Handle<Code> code_constant,
Register code_register,
Label* done,
InvokeFlag flag) {
bool definitely_matches = false;
Label invoke;
if (expected.is_immediate()) {
ASSERT(actual.is_immediate());
if (expected.immediate() == actual.immediate()) {
definitely_matches = true;
} else {
movq(rax, Immediate(actual.immediate()));
if (expected.immediate() ==
SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
      // Don't worry about adapting arguments for built-ins that
      // don't want that done. Skip adaptation code by making it look
      // like we have a match between expected and actual number of
      // arguments.
definitely_matches = true;
} else {
movq(rbx, Immediate(expected.immediate()));
}
}
} else {
if (actual.is_immediate()) {
// Expected is in register, actual is immediate. This is the
// case when we invoke function values without going through the
// IC mechanism.
cmpq(expected.reg(), Immediate(actual.immediate()));
j(equal, &invoke);
ASSERT(expected.reg().is(rbx));
movq(rax, Immediate(actual.immediate()));
} else if (!expected.reg().is(actual.reg())) {
// Both expected and actual are in (different) registers. This
// is the case when we invoke functions using call and apply.
cmpq(expected.reg(), actual.reg());
j(equal, &invoke);
ASSERT(actual.reg().is(rax));
ASSERT(expected.reg().is(rbx));
}
}
if (!definitely_matches) {
Handle<Code> adaptor =
Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
if (!code_constant.is_null()) {
movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
} else if (!code_register.is(rdx)) {
movq(rdx, code_register);
}
movq(kScratchRegister, adaptor, RelocInfo::CODE_TARGET);
if (flag == CALL_FUNCTION) {
call(kScratchRegister);
jmp(done);
} else {
jmp(kScratchRegister);
}
bind(&invoke);
}
}
void MacroAssembler::InvokeCode(Register code,
const ParameterCount& expected,
const ParameterCount& actual,
InvokeFlag flag) {
Label done;
InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
if (flag == CALL_FUNCTION) {
call(code);
} else {
ASSERT(flag == JUMP_FUNCTION);
jmp(code);
}
bind(&done);
}
void MacroAssembler::InvokeCode(Handle<Code> code,
const ParameterCount& expected,
const ParameterCount& actual,
RelocInfo::Mode rmode,
InvokeFlag flag) {
Label done;
Register dummy = rax;
InvokePrologue(expected, actual, code, dummy, &done, flag);
if (flag == CALL_FUNCTION) {
Call(code, rmode);
} else {
ASSERT(flag == JUMP_FUNCTION);
Jump(code, rmode);
}
bind(&done);
}
void MacroAssembler::InvokeFunction(Register function,
const ParameterCount& actual,
InvokeFlag flag) {
ASSERT(function.is(rdi));
movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
movsxlq(rbx,
FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
// Advances rdx to the end of the Code object header, to the start of
// the executable code.
lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
ParameterCount expected(rbx);
InvokeCode(rdx, expected, actual, flag);
}
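
// EnterFrame pushes the remaining parts of a standard internal frame
// below the saved rbp: the context, a smi frame-type marker (checked by
// LeaveFrame) and the code object; in debug mode it verifies that the
// code object has been patched away from the undefined placeholder.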
void MacroAssembler::EnterFrame(StackFrame::Type type) {
push(rbp);
movq(rbp, rsp);
push(rsi); // Context.
push(Immediate(Smi::FromInt(type)));
movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
push(kScratchRegister);
if (FLAG_debug_code) {
movq(kScratchRegister,
Factory::undefined_value(),
RelocInfo::EMBEDDED_OBJECT);
cmpq(Operand(rsp, 0), kScratchRegister);
Check(not_equal, "code object not properly patched");
}
}
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
if (FLAG_debug_code) {
movq(kScratchRegister, Immediate(Smi::FromInt(type)));
cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
Check(equal, "stack frame types must match");
}
movq(rsp, rbp);
pop(rbp);
}
void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);
  // Set up the frame structure on the stack.
// All constants are relative to the frame pointer of the exit frame.
ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
push(rbp);
movq(rbp, rsp);
// Reserve room for entry stack pointer and push the debug marker.
ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
push(Immediate(0)); // saved entry sp, patched before call
push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));
// Save the frame pointer and the context in top.
ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
ExternalReference context_address(Top::k_context_address);
  movq(r14, rax);  // Back up rax (the argument count) before we use it.
movq(rax, rbp);
store_rax(c_entry_fp_address);
movq(rax, rsi);
store_rax(context_address);
  // Set up argv in callee-saved register r15. It is reused in
  // LeaveExitFrame, so it must be retained across the C call.
int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
lea(r15, Operand(rbp, r14, times_pointer_size, offset));
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from their memory
  // locations. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! rsp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
PushRegistersFromMemory(kJSCallerSaved);
}
#endif
// Reserve space for two arguments: argc and argv
subq(rsp, Immediate(2 * kPointerSize));
// Get the required frame alignment for the OS.
static const int kFrameAlignment = OS::ActivationFrameAlignment();
if (kFrameAlignment > 0) {
ASSERT(IsPowerOf2(kFrameAlignment));
movq(kScratchRegister, Immediate(-kFrameAlignment));
and_(rsp, kScratchRegister);
}
// Patch the saved entry sp.
movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
// Registers:
// r15 : argv
#ifdef ENABLE_DEBUGGER_SUPPORT
// Restore the memory copy of the registers by digging them out from
// the stack. This is needed to allow nested break points.
if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register rbx below because we don't need
    // the function pointer after this.
const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
lea(rbx, Operand(rbp, kOffset));
CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
}
#endif
// Get the return address from the stack and restore the frame pointer.
movq(rcx, Operand(rbp, 1 * kPointerSize));
movq(rbp, Operand(rbp, 0 * kPointerSize));
// Pop the arguments and the receiver from the caller stack.
lea(rsp, Operand(r15, 1 * kPointerSize));
// Restore current context from top and clear it in debug mode.
ExternalReference context_address(Top::k_context_address);
movq(kScratchRegister, context_address);
movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
movq(Operand(kScratchRegister, 0), Immediate(0));
#endif
// Push the return address to get ready to return.
push(rcx);
// Clear the top frame.
ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
movq(kScratchRegister, c_entry_fp_address);
movq(Operand(kScratchRegister, 0), Immediate(0));
}
} } // namespace v8::internal