blob: ae45eaba547c1df29643972f00b783fc7b2be212 [file] [log] [blame]
ager@chromium.org5ec48922009-05-05 07:25:34 +00001// Copyright 2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
kasperl@chromium.org71affb52009-05-26 05:44:31 +000028#include "v8.h"
29
30#include "bootstrapper.h"
31#include "codegen-inl.h"
ager@chromium.orgeadaf222009-06-16 09:43:10 +000032#include "assembler-x64.h"
ager@chromium.orge2902be2009-06-08 12:21:35 +000033#include "macro-assembler-x64.h"
sgjesse@chromium.orgb9d7da12009-08-05 08:38:10 +000034#include "serialize.h"
ager@chromium.orgeadaf222009-06-16 09:43:10 +000035#include "debug.h"
kasperl@chromium.org71affb52009-05-26 05:44:31 +000036
37namespace v8 {
38namespace internal {
39
// Construct a macro assembler emitting into 'buffer' of the given size.
// Stub bookkeeping starts permissive: not generating a stub, stub calls
// allowed, and no unresolved builtin references recorded yet.
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}
47
ager@chromium.orge2902be2009-06-08 12:21:35 +000048
ager@chromium.org18ad94b2009-09-02 08:22:29 +000049void MacroAssembler::LoadRoot(Register destination,
50 Heap::RootListIndex index) {
51 movq(destination, Operand(r13, index << kPointerSizeLog2));
52}
53
54
55void MacroAssembler::PushRoot(Heap::RootListIndex index) {
56 push(Operand(r13, index << kPointerSizeLog2));
57}
58
59
60void MacroAssembler::CompareRoot(Register with,
61 Heap::RootListIndex index) {
62 cmpq(with, Operand(r13, index << kPointerSizeLog2));
63}
64
sgjesse@chromium.orgb9d7da12009-08-05 08:38:10 +000065
// Emit code that sets the remembered-set bit for the slot 'addr' inside
// 'object'. Clobbers 'object', 'addr' and 'scratch'. Handles both the
// normal per-page remembered set and the extra remembered set that
// follows a large FixedArray.
static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  ASSERT(is_int32(~Page::kPageAlignmentMask));
  masm->and_(object,
             Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask)));
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  masm->subq(addr, page_start);
  masm->shr(addr, Immediate(kPointerSizeLog2));
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
  masm->j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Load the array length into 'scratch'.
  masm->movl(scratch,
             Operand(page_start,
                     Page::kObjectStartOffset + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray),
  // at page_start + kObjectStartOffset + objectSize, where objectSize is
  // FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  masm->lea(page_start,
            Operand(page_start, array_length, times_pointer_size,
                    Page::kObjectStartOffset + FixedArray::kHeaderSize
                        - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}
117
118
// Code stub that performs the out-of-line remembered-set update for
// MacroAssembler::RecordWrite when the write is not inlined. The three
// registers are baked into the stub's minor key so identical register
// assignments share one compiled stub.
class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;   // Object being stored into.
  Register addr_;     // Address of the slot that was written.
  Register scratch_;  // Scratch register clobbered by the helper.

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};
153
154
// Emit the shared remembered-set update followed by a plain return.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}
159
160
// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have
  // space for the remembered set bits).
  Label done;

  // Test that the object address is not in the new space. We cannot
  // set remembered set bits in the new space.
  movq(value, object);
  ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
  and_(value, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
  movq(kScratchRegister, ExternalReference::new_space_start());
  cmpq(value, kScratchRegister);
  j(equal, &done);

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Small static offset within a regular page: the bit can be set inline.
    // Compute the bit offset in the remembered set, leave it in 'value'.
    lea(value, Operand(object, offset));
    ASSERT(is_int32(Page::kPageAlignmentMask));
    and_(value, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
    shr(value, Immediate(kObjectAlignmentBits));

    // Compute the page address from the heap object pointer, leave it in
    // 'object' (immediate value is sign extended).
    and_(object, Immediate(~Page::kPageAlignmentMask));

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, Page::kRSetOffset), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric. Multiply a smi by 4 to get an
      // offset into an array of pointers.
      lea(dst, Operand(object, dst, times_half_pointer_size,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}
223
224
ager@chromium.orgeadaf222009-06-16 09:43:10 +0000225void MacroAssembler::Assert(Condition cc, const char* msg) {
226 if (FLAG_debug_code) Check(cc, msg);
227}
228
229
230void MacroAssembler::Check(Condition cc, const char* msg) {
231 Label L;
232 j(cc, &L);
233 Abort(msg);
234 // will not return here
235 bind(&L);
236}
237
238
// Jump to 'then_label' when 'result' is zero and 'op' is negative, i.e.
// when a zero result should actually have been negative zero. Only the
// condition flags are clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);  // A non-zero result cannot be negative zero.
  testl(op, op);
  j(sign, then_label);  // result == 0 and op < 0: negative zero case.
  bind(&ok);
}
249
250
// Emit code that aborts execution via Runtime::kAbort with 'msg' as the
// explanation. The emitted code does not return.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // NOTE(review): rax is pushed but never popped; harmless because the
  // runtime call below never returns.
  push(rax);
  // Push the aligned pointer (smi-tagged) and the alignment delta (a smi)
  // as the two arguments to Runtime::kAbort.
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}
277
278
// Call the code stub's (possibly lazily compiled) code object.
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
283
284
285void MacroAssembler::StubReturn(int argc) {
286 ASSERT(argc >= 1 && generating_stub());
287 ret((argc - 1) * kPointerSize);
288}
289
290
// Handle a runtime call made with the wrong number of arguments: drop
// the arguments from the stack and return undefined in rax.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}
297
298
// Convenience overload: resolve the runtime function for 'id' and call it.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
302
303
// Call the runtime function 'f' with 'num_arguments' arguments through
// its per-function RuntimeStub. On arity mismatch, emits the illegal-
// operation sequence instead of the call.
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}
318
319
// Tail-call the external (builtin) function 'ext', passing the argument
// count in rax as the C entry stub expects.
void MacroAssembler::TailCallRuntime(ExternalReference const& ext,
                                     int num_arguments) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToBuiltin(ext);
}
336
337
// Tail-jump into the C entry runtime stub, passing the target entry
// point in rbx as CEntryStub expects.
void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces;
  // No 64-bit direct jmp immediate on x64: go through kScratchRegister.
  movq(kScratchRegister, ces.GetCode(), RelocInfo::CODE_TARGET);
  jmp(kScratchRegister);
}
345
ager@chromium.orge2902be2009-06-08 12:21:35 +0000346
// Load the entry address (past the Code header) of builtin 'id' into
// 'target'. If the builtin is not yet compiled, record a fixup so the
// embedded code object can be patched later.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  movq(target, code, RelocInfo::EMBEDDED_OBJECT);
  if (!resolved) {
    // Remember where the embedded code object sits (the intptr_t
    // immediate just emitted) so the bootstrapper can fix it up.
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(intptr_t), flags, name };
    unresolved_.Add(entry);
  }
  // Skip the Code header so 'target' points at the first instruction.
  addq(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
}
365
366
// Return the code object for builtin 'id', reporting through 'resolved'
// whether it has been compiled yet. As a side effect loads the builtin
// function into rdi (rdx is clobbered as scratch).
Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(rdx, FieldOperand(rdx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  movq(rdi, FieldOperand(rdx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}
383
384
ager@chromium.orge2902be2009-06-08 12:21:35 +0000385void MacroAssembler::Set(Register dst, int64_t x) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +0000386 if (x == 0) {
387 xor_(dst, dst);
388 } else if (is_int32(x)) {
ager@chromium.orge2902be2009-06-08 12:21:35 +0000389 movq(dst, Immediate(x));
390 } else if (is_uint32(x)) {
391 movl(dst, Immediate(x));
392 } else {
393 movq(dst, x, RelocInfo::NONE);
394 }
395}
396
397
398void MacroAssembler::Set(const Operand& dst, int64_t x) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +0000399 if (x == 0) {
400 xor_(kScratchRegister, kScratchRegister);
401 movq(dst, kScratchRegister);
402 } else if (is_int32(x)) {
403 movq(dst, Immediate(x));
ager@chromium.orge2902be2009-06-08 12:21:35 +0000404 } else if (is_uint32(x)) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +0000405 movl(dst, Immediate(x));
ager@chromium.orge2902be2009-06-08 12:21:35 +0000406 } else {
407 movq(kScratchRegister, x, RelocInfo::NONE);
kasperl@chromium.org68ac0092009-07-09 06:00:35 +0000408 movq(dst, kScratchRegister);
ager@chromium.orge2902be2009-06-08 12:21:35 +0000409 }
ager@chromium.orge2902be2009-06-08 12:21:35 +0000410}
411
412
// Whether 'value' must be loaded in an obfuscated ("unsafe smi") way.
// Always false for now on x64; see LoadUnsafeSmi.
bool MacroAssembler::IsUnsafeSmi(Smi* value) {
  return false;
}
416
// Load an "unsafe" smi. Unreachable while IsUnsafeSmi() always returns
// false; kept as a hook for when unsafe smis are supported on x64.
void MacroAssembler::LoadUnsafeSmi(Register dst, Smi* source) {
  UNIMPLEMENTED();
}
420
421
// Load the handle-referenced object into 'dst'. Smis become 32-bit
// immediates (sign-extended); heap objects get EMBEDDED_OBJECT
// relocation so the GC can update them.
void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    if (IsUnsafeSmi(source)) {
      LoadUnsafeSmi(dst, source);
    } else {
      // A smi's payload lives in its tagged word; the low 32 bits
      // are sufficient to reconstruct it on load.
      int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
      movq(dst, Immediate(smi));
    }
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}
435
436
// Store the handle-referenced object into memory operand 'dst'. Smis
// are stored as 32-bit immediates; heap objects go through
// kScratchRegister with EMBEDDED_OBJECT relocation.
// NOTE(review): unlike Move(Register, ...), this overload has no
// IsUnsafeSmi() path and no !IsFailure() assert. Harmless while
// IsUnsafeSmi() always returns false, but verify if that changes.
void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
    movq(dst, Immediate(smi));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}
446
447
// Compare 'dst' with the handle-referenced object (loaded into
// kScratchRegister), setting the flags for a subsequent jump.
void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  Move(kScratchRegister, source);
  cmpq(dst, kScratchRegister);
}
452
453
// Compare memory operand 'dst' with the handle-referenced object.
// Smis are compared on the low 32 bits only; heap objects are compared
// as full 64-bit pointers via kScratchRegister.
void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    if (IsUnsafeSmi(source)) {
      // NOTE(review): cmpl compares only the low 32 bits of the 64-bit
      // value LoadUnsafeSmi would produce. Dead code while IsUnsafeSmi()
      // is always false, but verify if unsafe smis are ever enabled.
      LoadUnsafeSmi(kScratchRegister, source);
      cmpl(dst, kScratchRegister);
    } else {
      // For smi-comparison, it suffices to compare the low 32 bits.
      int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
      cmpl(dst, Immediate(smi));
    }
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}
470
471
// Push the handle-referenced object onto the stack. Smis are pushed as
// sign-extended 32-bit immediates; heap objects go through
// kScratchRegister with EMBEDDED_OBJECT relocation.
void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    if (IsUnsafeSmi(source)) {
      LoadUnsafeSmi(kScratchRegister, source);
      push(kScratchRegister);
    } else {
      int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
      push(Immediate(smi));
    }
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}
487
488
// Push the smi 'source' onto the stack as a sign-extended 32-bit
// immediate (sufficient to reconstruct the tagged value).
void MacroAssembler::Push(Smi* source) {
  if (IsUnsafeSmi(source)) {
    LoadUnsafeSmi(kScratchRegister, source);
    push(kScratchRegister);
  } else {
    int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(source));
    push(Immediate(smi));
  }
}
498
499
// Jump to an external address. x64 has no 64-bit direct jump, so the
// destination is loaded into kScratchRegister first.
void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}
504
505
// Jump to 'destination' with the given relocation mode, via
// kScratchRegister (no 64-bit direct jump on x64).
void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}
510
511
// Jump to a code object. The movq immediate is the patchable target;
// the DEBUG assert checks it sits exactly kPointerSize bytes before
// the end of the sequence, matching the patchable return sequence
// layout the debugger expects.
void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  movq(kScratchRegister, code_object, rmode);
#ifdef DEBUG
  Label target;
  bind(&target);
#endif
  jmp(kScratchRegister);
#ifdef DEBUG
  ASSERT_EQ(kPatchReturnSequenceLength,
            SizeOfCodeGeneratedSince(&target) + kPointerSize);
#endif
}
525
526
// Call an external address. x64 has no 64-bit direct call, so the
// destination is loaded into kScratchRegister first.
void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}
531
532
// Call 'destination' with the given relocation mode, via
// kScratchRegister (no 64-bit direct call on x64).
void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}
537
538
// Call a code object, flushing recorded source positions first. The
// movq immediate is the patchable call target; the DEBUG assert checks
// it sits exactly kPointerSize bytes before the end of the sequence.
void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  WriteRecordedPositions();
  movq(kScratchRegister, code_object, rmode);
#ifdef DEBUG
  // Patch target is kPointer size bytes *before* target label.
  Label target;
  bind(&target);
#endif
  call(kScratchRegister);
#ifdef DEBUG
  ASSERT_EQ(kPatchReturnSequenceLength,
            SizeOfCodeGeneratedSince(&target) + kPointerSize);
#endif
}
554
555
// Push a try-handler frame (state, frame pointer, next handler) onto
// the stack and link it as the current handler in Top's handler-address
// slot. The return address is already on top of the stack.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}
592
593
// Return without popping any stack arguments.
void MacroAssembler::Ret() {
  ret(0);
}
597
598
// Compare and pop the two values on top of the FPU stack, transferring
// the result into the CPU flags so conditional jumps can follow.
void MacroAssembler::FCmp() {
  fcompp();     // Compare ST(0) with ST(1) and pop both.
  push(rax);    // Preserve rax while it carries the FPU status word.
  fnstsw_ax();  // Store the FPU status word in ax.
  if (CpuFeatures::IsSupported(CpuFeatures::SAHF)) {
    sahf();     // Copy ah straight into the flags.
  } else {
    // SAHF unavailable: route the status bits into the flags register
    // through the stack and popfq instead.
    shrl(rax, Immediate(8));
    and_(rax, Immediate(0xFF));
    push(rax);
    popfq();
  }
  pop(rax);
}
613
614
// Compare the instance type of 'heap_object' with 'type', leaving the
// object's map in 'map' for further inspection by the caller.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
621
622
// Compare the instance type stored in 'map' with 'type' (8-bit compare).
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}
627
628
// Try to load the prototype object of 'function' into 'result'. Jumps
// to 'miss' when 'function' is a smi, is not a JSFunction, or its
// prototype/initial map is the hole. 'result' is also used as scratch.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
673
ager@chromium.orgeadaf222009-06-16 09:43:10 +0000674
675void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
676 if (FLAG_native_code_counters && counter->Enabled()) {
677 movq(kScratchRegister, ExternalReference(counter));
678 movl(Operand(kScratchRegister, 0), Immediate(value));
679 }
680}
681
682
683void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
684 ASSERT(value > 0);
685 if (FLAG_native_code_counters && counter->Enabled()) {
686 movq(kScratchRegister, ExternalReference(counter));
687 Operand operand(kScratchRegister, 0);
688 if (value == 1) {
689 incl(operand);
690 } else {
691 addl(operand, Immediate(value));
692 }
693 }
694}
695
696
697void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
698 ASSERT(value > 0);
699 if (FLAG_native_code_counters && counter->Enabled()) {
700 movq(kScratchRegister, ExternalReference(counter));
701 Operand operand(kScratchRegister, 0);
702 if (value == 1) {
703 decl(operand);
704 } else {
705 subl(operand, Immediate(value));
706 }
707 }
708}
709
710
711#ifdef ENABLE_DEBUGGER_SUPPORT
712
// Push the debugger's saved copy of each JS caller-saved register in
// 'regs' onto the stack, reading from the per-register memory slots.
void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      push(Operand(kScratchRegister, 0));
    }
  }
}
726
// Copy the current value of each JS caller-saved register in 'regs'
// into the debugger's per-register memory slot.
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(Operand(kScratchRegister, 0), reg);
    }
  }
}
741
742
// Load each JS caller-saved register in 'regs' from the debugger's
// per-register memory slot (reverse iteration order of Save).
void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(reg, Operand(kScratchRegister, 0));
    }
  }
}
757
758
// Pop values off the stack into the debugger's per-register memory
// slots, in reverse order of PushRegistersFromMemory.
void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      pop(Operand(kScratchRegister, 0));
    }
  }
}
772
773
// Copy register values previously pushed on the stack (starting at
// 'base') into the debugger's per-register memory slots, advancing
// 'base' past each copied slot. 'scratch' is clobbered.
void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT(!scratch.is(kScratchRegister));
  ASSERT(!base.is(kScratchRegister));
  ASSERT(!base.is(scratch));
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      movq(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(Operand(kScratchRegister, 0), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}
794
795#endif // ENABLE_DEBUGGER_SUPPORT
796
797
// Invoke the builtin 'id' (call or jump per 'flag'), recording a fixup
// when the builtin's code object has not been compiled yet.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);
  // The target code object is stored as an immediate
  // kPatchReturnSequenceLength bytes back from the current pc; record
  // that location so the bootstrapper can patch it once resolved.
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry =
        { pc_offset() - kPatchReturnSequenceLength, flags, name };
    unresolved_.Add(entry);
  }
}
826
827
// Emit the argument-count check that precedes a function invocation. When
// the expected and actual counts may differ at run time, control is routed
// through the ArgumentsAdaptorTrampoline; on that path the actual count is
// placed in rax and the expected count in rbx, the convention the adaptor
// relies on (enforced by the ASSERTs below). |done| is jumped to after a
// CALL through the adaptor returns, skipping the direct invocation that
// the caller emits after this prologue.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      // Counts match statically; no adaptation and no register setup needed.
      definitely_matches = true;
    } else {
      movq(rax, Immediate(actual.immediate()));
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        movq(rbx, Immediate(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke);
      ASSERT(expected.reg().is(rbx));
      movq(rax, Immediate(actual.immediate()));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    // Counts may differ: go through the arguments adaptor trampoline,
    // which expects the callee's code entry in rdx.
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      // The adaptor already invoked the function; skip the direct invoke.
      jmp(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
891
892
ager@chromium.orgeadaf222009-06-16 09:43:10 +0000893void MacroAssembler::InvokeCode(Register code,
894 const ParameterCount& expected,
895 const ParameterCount& actual,
896 InvokeFlag flag) {
897 Label done;
898 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
899 if (flag == CALL_FUNCTION) {
900 call(code);
901 } else {
902 ASSERT(flag == JUMP_FUNCTION);
903 jmp(code);
904 }
905 bind(&done);
906}
907
908
// Invoke a code object with the given relocation mode, after emitting the
// argument-count check (which may divert through the arguments adaptor).
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  // The code register is unused when a code constant is supplied; pass an
  // arbitrary register to satisfy InvokePrologue's signature.
  Register dummy = rax;
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code, rmode);
  }
  bind(&done);
}
925
926
// Invoke the JSFunction held in rdi: load its context into rsi, its formal
// parameter count into rbx and its code entry point into rdx, then
// dispatch through InvokeCode for argument adaptation.
void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  // The formal parameter count is a 32-bit field; sign-extend into rbx.
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  lea(rdx, FieldOperand(rdx, Code::kHeaderSize));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag);
}
943
944
// Build a standard internal frame: saved rbp, context, frame-type marker
// and the code object of the generated code.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  push(Immediate(Smi::FromInt(type)));
  // Push this code's own code object; the debug check below verifies the
  // slot no longer holds the undefined placeholder.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (FLAG_debug_code) {
    movq(kScratchRegister,
         Factory::undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}
960
961
// Tear down a frame built by EnterFrame, restoring the caller's rsp/rbp
// and discarding the context, marker and code-object slots.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    // Verify that the frame being left carries the expected type marker.
    movq(kScratchRegister, Immediate(Smi::FromInt(type)));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}
971
972
973
// Enter an exit frame for calling from JS out to C++. On exit: rbp points
// at the new exit frame, rsp is OS-aligned (with Win64 shadow space
// reserved), r15 holds argv and r14 the argument count. rax is assumed to
// hold the number of arguments on entry (it is used to compute argv).
void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Setup the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  movq(r14, rax);  // Backup rax (argument count) before we use it.

  movq(rax, rbp);
  store_rax(c_entry_fp_address);
  movq(rax, rsi);
  store_rax(context_address);

  // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r15, Operand(rbp, r14, times_pointer_size, offset));

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

#ifdef _WIN64
  // Reserve space for the Arguments object. The Windows 64-bit ABI
  // requires us to pass this structure as a pointer to its location on
  // the stack. The structure contains 2 pointers.
  // The structure on the stack must be 16-byte aligned.
  // We also need backing space for 4 parameters, even though
  // we only pass one parameter, and it is in a register.
  subq(rsp, Immediate(6 * kPointerSize));
  ASSERT(kFrameAlignment == 2 * kPointerSize);  // Change the padding if needed.
#endif

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
1041
1042
// Leave an exit frame entered with EnterExitFrame: restore the caller's
// frame and context, pop the JS arguments and receiver (argv is in r15)
// and clear the saved c_entry_fp in Top.
void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
  // Registers:
  // r15 : argv
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register rbx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(rbx, Operand(rbp, kOffset));
    CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(rsp, Operand(r15, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  movq(kScratchRegister, context_address);
  movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
  movq(Operand(kScratchRegister, 0), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(rcx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  movq(kScratchRegister, c_entry_fp_address);
  movq(Operand(kScratchRegister, 0), Immediate(0));
}
1082
1083
// Emit map checks for |object| and every object on its prototype chain up
// to and including |holder|, jumping to |miss| on any mismatch or failed
// security check. The chain is walked at compile time; only the map
// comparisons are emitted. Returns the register that holds the holder
// afterwards (object_reg when object == holder, holder_reg otherwise).
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg. On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      Cmp(scratch, Handle<Map>(object->map()));
      // Branch on the result of the map check.
      j(not_equal, miss);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      Cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Handle<Map>(object->map()));
      // Branch on the result of the map check.
      j(not_equal, miss);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      Move(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  Cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Handle<Map>(holder->map()));
  j(not_equal, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}
1167
1168
1169
1170
// Emit a security check: verify that the calling context and the context
// of the global proxy in |holder_reg| share the same security token,
// jumping to |miss| when access must not be allowed. Clobbers |scratch|
// and kScratchRegister.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // Preserve original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset = Context::kHeaderSize +
      Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
1232
1233
ager@chromium.org18ad94b2009-09-02 08:22:29 +00001234void MacroAssembler::LoadAllocationTopHelper(
1235 Register result,
1236 Register result_end,
1237 Register scratch,
1238 bool result_contains_top_on_entry) {
1239 ExternalReference new_space_allocation_top =
1240 ExternalReference::new_space_allocation_top_address();
1241
1242 // Just return if allocation top is already known.
1243 if (result_contains_top_on_entry) {
1244 // No use of scratch if allocation top is provided.
1245 ASSERT(scratch.is(no_reg));
1246 return;
1247 }
1248
1249 // Move address of new object to result. Use scratch register if available.
1250 if (scratch.is(no_reg)) {
1251 movq(kScratchRegister, new_space_allocation_top);
1252 movq(result, Operand(kScratchRegister, 0));
1253 } else {
1254 ASSERT(!scratch.is(result_end));
1255 movq(scratch, new_space_allocation_top);
1256 movq(result, Operand(scratch, 0));
1257 }
1258}
1259
1260
1261void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1262 Register scratch) {
1263 ExternalReference new_space_allocation_top =
1264 ExternalReference::new_space_allocation_top_address();
1265
1266 // Update new top.
1267 if (result_end.is(rax)) {
1268 // rax can be stored directly to a memory location.
1269 store_rax(new_space_allocation_top);
1270 } else {
1271 // Register required - use scratch provided if available.
1272 if (scratch.is(no_reg)) {
1273 movq(kScratchRegister, new_space_allocation_top);
1274 movq(Operand(kScratchRegister, 0), result_end);
1275 } else {
1276 movq(Operand(scratch, 0), result_end);
1277 }
1278 }
1279}
1280
1281
// Allocate an object of statically known |object_size| bytes in new space.
// On success |result| holds the untagged start of the new object and
// |result_end| the first byte past it; jumps to |gc_required| when new
// space is exhausted. When |result_contains_top_on_entry| is true, the
// allocation top is already in |result| and |scratch| must be no_reg.
void MacroAssembler::AllocateObjectInNewSpace(
    int object_size,
    Register result,
    Register result_end,
    Register scratch,
    Label* gc_required,
    bool result_contains_top_on_entry) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result,
                          result_end,
                          scratch,
                          result_contains_top_on_entry);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, object_size));
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
1308
1309
// Allocate an array-like object in new space whose size is
// header_size + element_count << element_size bytes, with the element
// count taken from a register. On success |result| holds the untagged
// start and |result_end| the end of the object; jumps to |gc_required|
// when new space is exhausted. When |result_contains_top_on_entry| is
// true, the allocation top is already in |result| and |scratch| must be
// no_reg.
void MacroAssembler::AllocateObjectInNewSpace(
    int header_size,
    ScaleFactor element_size,
    Register element_count,
    Register result,
    Register result_end,
    Register scratch,
    Label* gc_required,
    bool result_contains_top_on_entry) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result,
                          result_end,
                          scratch,
                          result_contains_top_on_entry);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
1338
1339
1340void MacroAssembler::AllocateObjectInNewSpace(
1341 Register object_size,
1342 Register result,
1343 Register result_end,
1344 Register scratch,
1345 Label* gc_required,
1346 bool result_contains_top_on_entry) {
1347
1348 // Load address of new object into result.
1349 LoadAllocationTopHelper(result,
1350 result_end,
1351 scratch,
1352 result_contains_top_on_entry);
1353
1354
1355 // Calculate new top and bail out if new space is exhausted.
1356 ExternalReference new_space_allocation_limit =
1357 ExternalReference::new_space_allocation_limit_address();
1358 if (!object_size.is(result_end)) {
1359 movq(result_end, object_size);
1360 }
1361 addq(result_end, result);
1362 movq(kScratchRegister, new_space_allocation_limit);
1363 cmpq(result_end, Operand(kScratchRegister, 0));
1364 j(above, gc_required);
1365
1366 // Update allocation top.
1367 UpdateAllocationTopHelper(result_end, scratch);
1368}
1369
1370
// Undo the most recent new-space allocation by resetting the allocation
// top to |object|, which must be the (possibly tagged) start of the last
// object allocated. |object| is untagged in place.
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  movq(kScratchRegister, new_space_allocation_top);
#ifdef DEBUG
  // The undone object must lie strictly below the current allocation top.
  cmpq(object, Operand(kScratchRegister, 0));
  Check(below, "Undo allocation of non allocated memory");
#endif
  movq(Operand(kScratchRegister, 0), object);
}
1384
1385
kasperl@chromium.org71affb52009-05-26 05:44:31 +00001386} } // namespace v8::internal