// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(r13, index << kPointerSizeLog2));
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(r13, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(r13, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  ASSERT(is_int32(~Page::kPageAlignmentMask));
  masm->and_(object,
             Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask)));
  Register page_start = object;

  // Compute the bit address in the remembered set (the index of the pointer
  // within the page). Reuse 'addr' as pointer_offset.
  masm->subq(addr, page_start);
  masm->shr(addr, Immediate(kPointerSizeLog2));
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
  masm->j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Load the array length into 'scratch'.
  masm->movl(scratch,
             Operand(page_start,
                     Page::kObjectStartOffset + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray), at
  //   page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  masm->lea(page_start,
            Operand(page_start, array_length, times_pointer_size,
                    Page::kObjectStartOffset + FixedArray::kHeaderSize
                    - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits : public BitField<uint32_t, 0, 4> {};
  class AddressBits : public BitField<uint32_t, 4, 4> {};
  class ObjectBits : public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the smi_index register contains the array index into
// the elements array represented as a smi. Otherwise it can be used as a
// scratch register.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register smi_index) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have space
  // for the remembered set bits).
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, smi_index);
  bind(&done);
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register smi_index) {
  Label done;
  // Test that the object address is not in the new space. We cannot
  // set remembered set bits in the new space.
  movq(scratch, object);
  ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
  and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
  movq(kScratchRegister, ExternalReference::new_space_start());
  cmpq(scratch, kScratchRegister);
  j(equal, &done);

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'scratch'.
    lea(scratch, Operand(object, offset));
    ASSERT(is_int32(Page::kPageAlignmentMask));
    and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
    shr(scratch, Immediate(kObjectAlignmentBits));

    // Compute the page address from the heap object pointer, leave it in
    // 'object' (immediate value is sign extended).
    and_(object, Immediate(~Page::kPageAlignmentMask));

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, Page::kRSetOffset), scratch);
  } else {
    Register dst = smi_index;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric.
      SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
      lea(dst, Operand(object,
                       index.reg,
                       index.scale,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, scratch);
    } else {
      RecordWriteStub stub(object, dst, scratch);
      CallStub(&stub);
    }
  }

  bind(&done);
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(ExternalReference const& ext,
                                     int num_arguments,
                                     int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToRuntime(ext, result_size);
}


void MacroAssembler::JumpToRuntime(const ExternalReference& ext,
                                   int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  movq(target, code, RelocInfo::EMBEDDED_OBJECT);
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(intptr_t), flags, name };
    unresolved_.Add(entry);
  }
  addq(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
}

Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(rdx, FieldOperand(rdx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  movq(rdi, FieldOperand(rdx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


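// Set picks the cheapest instruction sequence that materializes the constant:
// xor for zero, a 32-bit immediate (sign- or zero-extended) when the value
// fits in 32 bits, and a full 64-bit movq otherwise.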
void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xor_(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (x == 0) {
    xor_(kScratchRegister, kScratchRegister);
    movq(dst, kScratchRegister);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

static int kSmiShift = kSmiTagSize + kSmiShiftSize;
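// kSmiShift is the number of low-order bits (tag plus padding) that are zero
// in a tagged smi on x64: the 32-bit smi value lives in the upper half of the
// 64-bit word, which is why tagging and untagging below are plain shifts by
// kSmiShift.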

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (src->value() == 0) {
    // Zero is the only tagged smi whose representation fits in 32 bits.
    cmpq(dst, Immediate(0));
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


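// A positive smi has both the tag bit (bit 0) and the sign bit (bit 63) clear.
// Rotating the value left by one moves the sign bit into bit 0 and the tag bit
// into bit 1, so testing the low two bits checks both conditions at once.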
Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  cmpq(kScratchRegister, Immediate(1));
  return equal;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testq(src, Immediate(0x80000000));
  return zero;
}


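// Negation can fail to produce a smi in exactly two cases: zero (the result
// would be negative zero, which is not a smi) and Smi::kMinValue (negation
// overflows back to itself). In both cases the negated value equals the
// original, so a single compare after the neg detects them.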
void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    addq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    subq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero; check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  Move(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    addq(dst, src);
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
    Label result_ok;
    j(no_overflow, &result_ok);
    subq(dst, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&result_ok);
  } else {
    Move(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
  } else {
    // Subtract by adding the negative, to do it in two operations.
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
    } else {
      Move(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
    Label sub_success;
    j(no_overflow, &sub_success);
    addq(src, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&sub_success);
  } else {
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
      j(overflow, on_not_smi_result);
    } else {
      Move(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with the negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    and_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    or_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    xor_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
                                                  Register src,
                                                  int shift_value,
                                                  Label* on_not_smi_result) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value,
                                          Label* on_not_smi_result) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smi_result) {
  ASSERT(!dst.is(rcx));
  Label result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // The shift amount is specified by the lower 5 bits, not six as for the
  // 64-bit shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  Label result_ok;
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift is rcx modulo 0x1f + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // The operands must not both be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, then neither operand is a smi.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then the scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}

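// SmiToIndex converts a smi to an integer index scaled by 2^shift. Since the
// smi value already sits kSmiShift bits up, the scaling folds into a single
// shift and the returned SmiIndex always uses times_1.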
SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}

SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
  ASSERT_EQ(0, kSmiTag);
  Condition smi = CheckSmi(src);
  j(smi, on_smi);
}


void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi);
}


void MacroAssembler::JumpIfNotPositiveSmi(Register src,
                                          Label* on_not_positive_smi) {
  Condition positive_smi = CheckPositiveSmi(src);
  j(NegateCondition(positive_smi), on_not_positive_smi);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals) {
  SmiCompare(src, constant);
  j(equal, on_equals);
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
                                      Label* on_not_both_smi) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Set(kScratchRegister, smi);
    push(kScratchRegister);
  }
}


void MacroAssembler::Test(const Operand& src, Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    testl(src, Immediate(static_cast<int32_t>(smi)));
  } else {
    Move(kScratchRegister, source);
    testq(src, kScratchRegister);
  }
}


void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}


void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}


void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  WriteRecordedPositions();
  call(code_object, rmode);
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}


void MacroAssembler::Ret() {
  ret(0);
}


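// FCmp compares the two values on top of the x87 stack and removes both:
// fucomip compares st(0) with st(1), sets EFLAGS, and pops st(0); the
// ffree/fincstp pair then discards the remaining operand.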
void MacroAssembler::FCmp() {
  fucomip();
  ffree(0);
  fincstp();
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1498
1499
1500void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1501 if (FLAG_native_code_counters && counter->Enabled()) {
1502 movq(kScratchRegister, ExternalReference(counter));
1503 movl(Operand(kScratchRegister, 0), Immediate(value));
1504 }
1505}
1506
1507
1508void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1509 ASSERT(value > 0);
1510 if (FLAG_native_code_counters && counter->Enabled()) {
1511 movq(kScratchRegister, ExternalReference(counter));
1512 Operand operand(kScratchRegister, 0);
1513 if (value == 1) {
1514 incl(operand);
1515 } else {
1516 addl(operand, Immediate(value));
1517 }
1518 }
1519}
1520
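// Illustrative use of the counter helpers above (not part of the original
// file; 'my_counter' is a hypothetical entry in the Counters collection):
//   __ IncrementCounter(&Counters::my_counter, 1);
//   ...
//   __ DecrementCounter(&Counters::my_counter, 1);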
1521
1522void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1523 ASSERT(value > 0);
1524 if (FLAG_native_code_counters && counter->Enabled()) {
1525 movq(kScratchRegister, ExternalReference(counter));
1526 Operand operand(kScratchRegister, 0);
1527 if (value == 1) {
1528 decl(operand);
1529 } else {
1530 subl(operand, Immediate(value));
1531 }
1532 }
1533}
1534
1535#ifdef ENABLE_DEBUGGER_SUPPORT
1536
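// The helpers below shuffle the JS caller-saved registers between the
// debugger's per-register save area (reached through Debug_Address::Register)
// and either the registers themselves or the stack, so that break points can
// be nested without losing register state.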
1537void MacroAssembler::PushRegistersFromMemory(RegList regs) {
1538 ASSERT((regs & ~kJSCallerSaved) == 0);
1539  // Push the contents of the registers' memory save locations onto the stack.
1540 for (int i = 0; i < kNumJSCallerSaved; i++) {
1541 int r = JSCallerSavedCode(i);
1542 if ((regs & (1 << r)) != 0) {
1543 ExternalReference reg_addr =
1544 ExternalReference(Debug_Address::Register(i));
1545 movq(kScratchRegister, reg_addr);
1546 push(Operand(kScratchRegister, 0));
1547 }
1548 }
1549}
1550
1551
1552void MacroAssembler::SaveRegistersToMemory(RegList regs) {
1553  ASSERT((regs & ~kJSCallerSaved) == 0);
1554  // Copy the contents of the registers to their memory save locations.
1555 for (int i = 0; i < kNumJSCallerSaved; i++) {
1556 int r = JSCallerSavedCode(i);
1557 if ((regs & (1 << r)) != 0) {
1558 Register reg = { r };
1559 ExternalReference reg_addr =
1560 ExternalReference(Debug_Address::Register(i));
1561 movq(kScratchRegister, reg_addr);
1562 movq(Operand(kScratchRegister, 0), reg);
1563 }
1564 }
1565}
1566
1567
1568void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
1569 ASSERT((regs & ~kJSCallerSaved) == 0);
1570  // Copy the contents of the memory save locations back into the registers.
1571 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1572 int r = JSCallerSavedCode(i);
1573 if ((regs & (1 << r)) != 0) {
1574 Register reg = { r };
1575 ExternalReference reg_addr =
1576 ExternalReference(Debug_Address::Register(i));
1577 movq(kScratchRegister, reg_addr);
1578 movq(reg, Operand(kScratchRegister, 0));
1579 }
1580 }
1581}
1582
1583
1584void MacroAssembler::PopRegistersToMemory(RegList regs) {
1585 ASSERT((regs & ~kJSCallerSaved) == 0);
1586  // Pop the contents from the stack into the memory save locations.
1587 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1588 int r = JSCallerSavedCode(i);
1589 if ((regs & (1 << r)) != 0) {
1590 ExternalReference reg_addr =
1591 ExternalReference(Debug_Address::Register(i));
1592 movq(kScratchRegister, reg_addr);
1593 pop(Operand(kScratchRegister, 0));
1594 }
1595 }
1596}
1597
1598
1599void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
1600 Register scratch,
1601 RegList regs) {
1602 ASSERT(!scratch.is(kScratchRegister));
1603 ASSERT(!base.is(kScratchRegister));
1604 ASSERT(!base.is(scratch));
1605 ASSERT((regs & ~kJSCallerSaved) == 0);
1606  // Copy the contents of the stack to the memory save locations and adjust base.
1607 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1608 int r = JSCallerSavedCode(i);
1609 if ((regs & (1 << r)) != 0) {
1610 movq(scratch, Operand(base, 0));
1611 ExternalReference reg_addr =
1612 ExternalReference(Debug_Address::Register(i));
1613 movq(kScratchRegister, reg_addr);
1614 movq(Operand(kScratchRegister, 0), scratch);
1615 lea(base, Operand(base, kPointerSize));
1616 }
1617 }
1618}
1619
1620#endif // ENABLE_DEBUGGER_SUPPORT
1621
1622
1623void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
1624 bool resolved;
1625 Handle<Code> code = ResolveBuiltin(id, &resolved);
1626
1627 // Calls are not allowed in some stubs.
1628 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
1629
1630 // Rely on the assertion to check that the number of provided
1631  // arguments matches the expected number of arguments. Fake a
1632 // parameter count to avoid emitting code to do the check.
1633 ParameterCount expected(0);
1634  InvokeCode(Handle<Code>(code),
1635 expected,
1636 expected,
1637 RelocInfo::CODE_TARGET,
1638 flag);
1639
1640 const char* name = Builtins::GetName(id);
1641 int argc = Builtins::GetArgumentsCount(id);
1642  // The target address for the call or jump is stored as an immediate at
1643  // offset kCallTargetAddressOffset before the end of the emitted instruction.
1644 if (!resolved) {
1645 uint32_t flags =
1646 Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
1647        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
1648 Unresolved entry =
1649 { pc_offset() - kCallTargetAddressOffset, flags, name };
1650 unresolved_.Add(entry);
1651 }
1652}
1653
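// Illustrative call (not part of the original file; the choice of builtin and
// of JUMP_FUNCTION are assumptions of this sketch):
//   __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);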
1654
1655void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1656 const ParameterCount& actual,
1657 Handle<Code> code_constant,
1658 Register code_register,
1659 Label* done,
1660 InvokeFlag flag) {
1661 bool definitely_matches = false;
1662 Label invoke;
1663 if (expected.is_immediate()) {
1664 ASSERT(actual.is_immediate());
1665 if (expected.immediate() == actual.immediate()) {
1666 definitely_matches = true;
1667 } else {
1668 movq(rax, Immediate(actual.immediate()));
1669 if (expected.immediate() ==
1670          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
1671        // Don't worry about adapting arguments for built-ins that
1672        // don't want that done. Skip adaptation code by making it look
1673 // like we have a match between expected and actual number of
1674 // arguments.
1675 definitely_matches = true;
1676 } else {
1677 movq(rbx, Immediate(expected.immediate()));
1678 }
1679 }
1680 } else {
1681 if (actual.is_immediate()) {
1682 // Expected is in register, actual is immediate. This is the
1683 // case when we invoke function values without going through the
1684 // IC mechanism.
1685 cmpq(expected.reg(), Immediate(actual.immediate()));
1686 j(equal, &invoke);
1687 ASSERT(expected.reg().is(rbx));
1688 movq(rax, Immediate(actual.immediate()));
1689 } else if (!expected.reg().is(actual.reg())) {
1690 // Both expected and actual are in (different) registers. This
1691 // is the case when we invoke functions using call and apply.
1692 cmpq(expected.reg(), actual.reg());
1693 j(equal, &invoke);
1694 ASSERT(actual.reg().is(rax));
1695 ASSERT(expected.reg().is(rbx));
1696 }
1697 }
1698
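  // If a mismatch is still possible, fall through to the arguments adaptor.
  // As arranged above, rax holds the actual argument count, rbx the expected
  // count, and rdx will hold the code to call after adaptation.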
1699 if (!definitely_matches) {
1700 Handle<Code> adaptor =
1701 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
1702 if (!code_constant.is_null()) {
1703 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
1704 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1705 } else if (!code_register.is(rdx)) {
1706 movq(rdx, code_register);
1707 }
1708
1709 if (flag == CALL_FUNCTION) {
1710 Call(adaptor, RelocInfo::CODE_TARGET);
1711 jmp(done);
1712 } else {
1713 Jump(adaptor, RelocInfo::CODE_TARGET);
1714 }
1715 bind(&invoke);
1716 }
1717}
1718
1719
1720void MacroAssembler::InvokeCode(Register code,
1721 const ParameterCount& expected,
1722 const ParameterCount& actual,
1723 InvokeFlag flag) {
1724 Label done;
1725 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1726 if (flag == CALL_FUNCTION) {
1727 call(code);
1728 } else {
1729 ASSERT(flag == JUMP_FUNCTION);
1730 jmp(code);
1731 }
1732 bind(&done);
1733}
1734
1735
1736void MacroAssembler::InvokeCode(Handle<Code> code,
1737 const ParameterCount& expected,
1738 const ParameterCount& actual,
1739 RelocInfo::Mode rmode,
1740 InvokeFlag flag) {
1741 Label done;
1742 Register dummy = rax;
1743 InvokePrologue(expected, actual, code, dummy, &done, flag);
1744 if (flag == CALL_FUNCTION) {
1745 Call(code, rmode);
1746 } else {
1747 ASSERT(flag == JUMP_FUNCTION);
1748 Jump(code, rmode);
1749 }
1750 bind(&done);
1751}
1752
1753
1754void MacroAssembler::InvokeFunction(Register function,
1755 const ParameterCount& actual,
1756 InvokeFlag flag) {
1757 ASSERT(function.is(rdi));
1758 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1759 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
1760 movsxlq(rbx,
1761 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
1762 movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
1763 // Advances rdx to the end of the Code object header, to the start of
1764 // the executable code.
1765 lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
1766
1767 ParameterCount expected(rbx);
1768 InvokeCode(rdx, expected, actual, flag);
1769}
1770
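// Illustrative call sequence (not part of the original file; the argument
// count and the use of CALL_FUNCTION are assumptions of this sketch):
//   ParameterCount actual(2);  // Two arguments already pushed by the caller.
//   __ InvokeFunction(rdi, actual, CALL_FUNCTION);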
1771
1772void MacroAssembler::EnterFrame(StackFrame::Type type) {
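  // The pushes below build a standard internal frame.  Relative to the new
  // rbp the layout is:
  //   rbp + kPointerSize     : return address
  //   rbp + 0                : caller's rbp
  //   rbp - kPointerSize     : context (rsi)
  //   rbp - 2 * kPointerSize : frame type marker (smi)
  //   rbp - 3 * kPointerSize : code object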
1773 push(rbp);
1774 movq(rbp, rsp);
1775 push(rsi); // Context.
1776  Push(Smi::FromInt(type));
1777  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
1778 push(kScratchRegister);
1779 if (FLAG_debug_code) {
1780 movq(kScratchRegister,
1781 Factory::undefined_value(),
1782 RelocInfo::EMBEDDED_OBJECT);
1783 cmpq(Operand(rsp, 0), kScratchRegister);
1784 Check(not_equal, "code object not properly patched");
1785 }
1786}
1787
1788
1789void MacroAssembler::LeaveFrame(StackFrame::Type type) {
1790 if (FLAG_debug_code) {
1791    Move(kScratchRegister, Smi::FromInt(type));
1792    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
1793 Check(equal, "stack frame types must match");
1794 }
1795 movq(rsp, rbp);
1796 pop(rbp);
1797}
1798
1799
1800void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
1801  // Set up the frame structure on the stack.
1802 // All constants are relative to the frame pointer of the exit frame.
1803 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
1804 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
1805 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
1806 push(rbp);
1807 movq(rbp, rsp);
1808
1809  // Reserve room for the entry stack pointer; then push the code object or a debug marker.
1810  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
1811  push(Immediate(0));  // Saved entry sp, patched before call.
1812  if (mode == ExitFrame::MODE_DEBUG) {
1813 push(Immediate(0));
1814 } else {
1815 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
1816 push(kScratchRegister);
1817 }
1818
1819 // Save the frame pointer and the context in top.
1820 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
1821 ExternalReference context_address(Top::k_context_address);
1822 movq(r14, rax); // Backup rax before we use it.
1823
1824 movq(rax, rbp);
1825 store_rax(c_entry_fp_address);
1826 movq(rax, rsi);
1827 store_rax(context_address);
1828
1829  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
1830 // so it must be retained across the C-call.
1831 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
1832 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
1833
1834#ifdef ENABLE_DEBUGGER_SUPPORT
1835 // Save the state of all registers to the stack from the memory
1836 // location. This is needed to allow nested break points.
1837  if (mode == ExitFrame::MODE_DEBUG) {
1838    // TODO(1243899): This should be symmetric to
1839 // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
1840 // correct here, but computed for the other call. Very error
1841 // prone! FIX THIS. Actually there are deeper problems with
1842 // register saving than this asymmetry (see the bug report
1843 // associated with this issue).
1844 PushRegistersFromMemory(kJSCallerSaved);
1845 }
1846#endif
1847
1848#ifdef _WIN64
1849 // Reserve space on stack for result and argument structures, if necessary.
1850 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
1851 // Reserve space for the Arguments object. The Windows 64-bit ABI
1852 // requires us to pass this structure as a pointer to its location on
1853 // the stack. The structure contains 2 values.
1854 int argument_stack_space = 2 * kPointerSize;
1855 // We also need backing space for 4 parameters, even though
1856  // we only pass one or two parameters, and they are passed in registers.
1857 int argument_mirror_space = 4 * kPointerSize;
1858 int total_stack_space =
1859 argument_mirror_space + argument_stack_space + result_stack_space;
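  // For example, with result_size == 2 the three terms above add up to
  // 32 + 16 + 16 = 64 bytes.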
1860 subq(rsp, Immediate(total_stack_space));
1861#endif
1862
1863 // Get the required frame alignment for the OS.
1864 static const int kFrameAlignment = OS::ActivationFrameAlignment();
1865 if (kFrameAlignment > 0) {
1866 ASSERT(IsPowerOf2(kFrameAlignment));
1867 movq(kScratchRegister, Immediate(-kFrameAlignment));
1868 and_(rsp, kScratchRegister);
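    // E.g. with a 16-byte alignment requirement this masks off the low four
    // bits, rounding rsp down to a multiple of 16.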
1869 }
1870
1871 // Patch the saved entry sp.
1872 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
1873}
1874
1875
1876void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
1877  // Registers:
1878 // r15 : argv
1879#ifdef ENABLE_DEBUGGER_SUPPORT
1880 // Restore the memory copy of the registers by digging them out from
1881 // the stack. This is needed to allow nested break points.
1882  if (mode == ExitFrame::MODE_DEBUG) {
1883    // It's okay to clobber register rbx below because we don't need
1884 // the function pointer after this.
1885 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
1886    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
1887    lea(rbx, Operand(rbp, kOffset));
1888 CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
1889 }
1890#endif
1891
1892 // Get the return address from the stack and restore the frame pointer.
1893 movq(rcx, Operand(rbp, 1 * kPointerSize));
1894 movq(rbp, Operand(rbp, 0 * kPointerSize));
1895
1896  // Pop everything up to and including the arguments and the receiver
1897 // from the caller stack.
1898 lea(rsp, Operand(r15, 1 * kPointerSize));
1899
1900 // Restore current context from top and clear it in debug mode.
1901 ExternalReference context_address(Top::k_context_address);
1902 movq(kScratchRegister, context_address);
1903 movq(rsi, Operand(kScratchRegister, 0));
1904#ifdef DEBUG
1905 movq(Operand(kScratchRegister, 0), Immediate(0));
1906#endif
1907
1908 // Push the return address to get ready to return.
1909 push(rcx);
1910
1911 // Clear the top frame.
1912 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
1913 movq(kScratchRegister, c_entry_fp_address);
1914 movq(Operand(kScratchRegister, 0), Immediate(0));
1915}
1916
1917
1918Register MacroAssembler::CheckMaps(JSObject* object,
1919                                   Register object_reg,
1920                                   JSObject* holder,
1921                                   Register holder_reg,
1922                                   Register scratch,
1923 Label* miss) {
1924 // Make sure there's no overlap between scratch and the other
1925 // registers.
1926 ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));
1927
1928 // Keep track of the current object in register reg. On the first
1929  // iteration, reg is an alias for object_reg; on later iterations,
1930 // it is an alias for holder_reg.
1931 Register reg = object_reg;
1932 int depth = 1;
1933
1934 // Check the maps in the prototype chain.
1935 // Traverse the prototype chain from the object and do map checks.
1936 while (object != holder) {
1937 depth++;
1938
1939 // Only global objects and objects that do not require access
1940 // checks are allowed in stubs.
1941 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
1942
1943 JSObject* prototype = JSObject::cast(object->GetPrototype());
1944 if (Heap::InNewSpace(prototype)) {
1945 // Get the map of the current object.
1946 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1947 Cmp(scratch, Handle<Map>(object->map()));
1948 // Branch on the result of the map check.
1949 j(not_equal, miss);
1950 // Check access rights to the global object. This has to happen
1951 // after the map check so that we know that the object is
1952 // actually a global object.
1953 if (object->IsJSGlobalProxy()) {
1954 CheckAccessGlobalProxy(reg, scratch, miss);
1955
1956 // Restore scratch register to be the map of the object.
1957 // We load the prototype from the map in the scratch register.
1958 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1959 }
1960 // The prototype is in new space; we cannot store a reference
1961 // to it in the code. Load it from the map.
1962 reg = holder_reg; // from now the object is in holder_reg
1963 movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));
1964
1965 } else {
1966 // Check the map of the current object.
1967 Cmp(FieldOperand(reg, HeapObject::kMapOffset),
1968 Handle<Map>(object->map()));
1969 // Branch on the result of the map check.
1970 j(not_equal, miss);
1971 // Check access rights to the global object. This has to happen
1972 // after the map check so that we know that the object is
1973 // actually a global object.
1974 if (object->IsJSGlobalProxy()) {
1975 CheckAccessGlobalProxy(reg, scratch, miss);
1976 }
1977 // The prototype is in old space; load it directly.
1978 reg = holder_reg; // from now the object is in holder_reg
1979 Move(reg, Handle<JSObject>(prototype));
1980 }
1981
1982 // Go to the next object in the prototype chain.
1983 object = prototype;
1984 }
1985
1986 // Check the holder map.
1987  Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
1988  j(not_equal, miss);
1989
1990 // Log the check depth.
1991 LOG(IntEvent("check-maps-depth", depth));
1992
1993 // Perform security check for access to the global object and return
1994 // the holder register.
1995 ASSERT(object == holder);
1996 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
1997 if (object->IsJSGlobalProxy()) {
1998 CheckAccessGlobalProxy(reg, scratch, miss);
1999 }
2000 return reg;
2001}
2002
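// Illustrative use of CheckMaps (not part of the original file; the objects,
// registers and the miss label are assumptions of this sketch):
//   Register reg = masm->CheckMaps(object, rdx, holder, rbx, rcx, &miss);
//   // On fall-through every map on the prototype chain has been checked and
//   // 'reg' holds the holder object.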
2003
2004void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2005 Register scratch,
2006 Label* miss) {
2007 Label same_contexts;
2008
2009 ASSERT(!holder_reg.is(scratch));
2010 ASSERT(!scratch.is(kScratchRegister));
2011 // Load current lexical context from the stack frame.
2012 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2013
2014 // When generating debug code, make sure the lexical context is set.
2015 if (FLAG_debug_code) {
2016 cmpq(scratch, Immediate(0));
2017 Check(not_equal, "we should not have an empty lexical context");
2018 }
2019 // Load the global context of the current context.
2020 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2021 movq(scratch, FieldOperand(scratch, offset));
2022 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2023
2024 // Check the context is a global context.
2025 if (FLAG_debug_code) {
2026 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2027 Factory::global_context_map());
2028 Check(equal, "JSGlobalObject::global_context should be a global context.");
2029 }
2030
2031 // Check if both contexts are the same.
2032 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2033 j(equal, &same_contexts);
2034
2035 // Compare security tokens.
2036 // Check that the security token in the calling global object is
2037 // compatible with the security token in the receiving global
2038 // object.
2039
2040 // Check the context is a global context.
2041 if (FLAG_debug_code) {
2042 // Preserve original value of holder_reg.
2043 push(holder_reg);
2044 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2045 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2046 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2047
2048    // Read the first word and compare to global_context_map().
2049 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2050 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2051 Check(equal, "JSGlobalObject::global_context should be a global context.");
2052 pop(holder_reg);
2053 }
2054
2055 movq(kScratchRegister,
2056 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2057  int token_offset =
2058      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
2059  movq(scratch, FieldOperand(scratch, token_offset));
2060 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2061 j(not_equal, miss);
2062
2063 bind(&same_contexts);
2064}
2065
2066
2067void MacroAssembler::LoadAllocationTopHelper(Register result,
2068 Register result_end,
2069 Register scratch,
2070 AllocationFlags flags) {
2071 ExternalReference new_space_allocation_top =
2072 ExternalReference::new_space_allocation_top_address();
2073
2074 // Just return if allocation top is already known.
2075 if ((flags & RESULT_CONTAINS_TOP) != 0) {
2076 // No use of scratch if allocation top is provided.
2077 ASSERT(scratch.is(no_reg));
2078#ifdef DEBUG
2079 // Assert that result actually contains top on entry.
2080 movq(kScratchRegister, new_space_allocation_top);
2081 cmpq(result, Operand(kScratchRegister, 0));
2082 Check(equal, "Unexpected allocation top");
2083#endif
2084 return;
2085 }
2086
2087 // Move address of new object to result. Use scratch register if available.
2088 if (scratch.is(no_reg)) {
2089 movq(kScratchRegister, new_space_allocation_top);
2090 movq(result, Operand(kScratchRegister, 0));
2091 } else {
2092 ASSERT(!scratch.is(result_end));
2093 movq(scratch, new_space_allocation_top);
2094 movq(result, Operand(scratch, 0));
2095 }
2096}
2097
2098
2099void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2100 Register scratch) {
2101  if (FLAG_debug_code) {
2102 testq(result_end, Immediate(kObjectAlignmentMask));
2103 Check(zero, "Unaligned allocation in new space");
2104 }
2105
2106  ExternalReference new_space_allocation_top =
2107 ExternalReference::new_space_allocation_top_address();
2108
2109 // Update new top.
2110 if (result_end.is(rax)) {
2111 // rax can be stored directly to a memory location.
2112 store_rax(new_space_allocation_top);
2113 } else {
2114 // Register required - use scratch provided if available.
2115 if (scratch.is(no_reg)) {
2116 movq(kScratchRegister, new_space_allocation_top);
2117 movq(Operand(kScratchRegister, 0), result_end);
2118 } else {
2119 movq(Operand(scratch, 0), result_end);
2120 }
2121 }
2122}
2123
2124
2125void MacroAssembler::AllocateInNewSpace(int object_size,
2126 Register result,
2127 Register result_end,
2128 Register scratch,
2129 Label* gc_required,
2130 AllocationFlags flags) {
2131 ASSERT(!result.is(result_end));
2132
2133 // Load address of new object into result.
2134 LoadAllocationTopHelper(result, result_end, scratch, flags);
2135
2136 // Calculate new top and bail out if new space is exhausted.
2137 ExternalReference new_space_allocation_limit =
2138 ExternalReference::new_space_allocation_limit_address();
2139 lea(result_end, Operand(result, object_size));
2140 movq(kScratchRegister, new_space_allocation_limit);
2141 cmpq(result_end, Operand(kScratchRegister, 0));
2142 j(above, gc_required);
2143
2144 // Update allocation top.
2145 UpdateAllocationTopHelper(result_end, scratch);
2146
2147 // Tag the result if requested.
2148 if ((flags & TAG_OBJECT) != 0) {
2149 addq(result, Immediate(kHeapObjectTag));
2150 }
2151}
2152
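// Illustrative allocation sequence (not part of the original file; the size
// constant, registers and the gc_required label are assumptions of this
// sketch):
//   __ AllocateInNewSpace(JSValue::kSize, rax, rbx, rcx, &gc_required,
//                         TAG_OBJECT);
//   // rax now holds a tagged pointer to JSValue::kSize bytes of new space.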
2153
2154void MacroAssembler::AllocateInNewSpace(int header_size,
2155 ScaleFactor element_size,
2156 Register element_count,
2157 Register result,
2158 Register result_end,
2159 Register scratch,
2160 Label* gc_required,
2161 AllocationFlags flags) {
2162 ASSERT(!result.is(result_end));
2163
2164 // Load address of new object into result.
2165 LoadAllocationTopHelper(result, result_end, scratch, flags);
2166
2167 // Calculate new top and bail out if new space is exhausted.
2168 ExternalReference new_space_allocation_limit =
2169 ExternalReference::new_space_allocation_limit_address();
2170 lea(result_end, Operand(result, element_count, element_size, header_size));
2171 movq(kScratchRegister, new_space_allocation_limit);
2172 cmpq(result_end, Operand(kScratchRegister, 0));
2173 j(above, gc_required);
2174
2175 // Update allocation top.
2176 UpdateAllocationTopHelper(result_end, scratch);
2177
2178 // Tag the result if requested.
2179 if ((flags & TAG_OBJECT) != 0) {
2180 addq(result, Immediate(kHeapObjectTag));
2181 }
2182}
2183
2184
2185void MacroAssembler::AllocateInNewSpace(Register object_size,
2186 Register result,
2187 Register result_end,
2188 Register scratch,
2189 Label* gc_required,
2190 AllocationFlags flags) {
2191 // Load address of new object into result.
2192 LoadAllocationTopHelper(result, result_end, scratch, flags);
2193
2194 // Calculate new top and bail out if new space is exhausted.
2195 ExternalReference new_space_allocation_limit =
2196 ExternalReference::new_space_allocation_limit_address();
2197 if (!object_size.is(result_end)) {
2198 movq(result_end, object_size);
2199 }
2200 addq(result_end, result);
2201 movq(kScratchRegister, new_space_allocation_limit);
2202 cmpq(result_end, Operand(kScratchRegister, 0));
2203 j(above, gc_required);
2204
2205 // Update allocation top.
2206 UpdateAllocationTopHelper(result_end, scratch);
2207
2208 // Tag the result if requested.
2209 if ((flags & TAG_OBJECT) != 0) {
2210 addq(result, Immediate(kHeapObjectTag));
2211 }
2212}
2213
2214
2215void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2216 ExternalReference new_space_allocation_top =
2217 ExternalReference::new_space_allocation_top_address();
2218
2219 // Make sure the object has no tag before resetting top.
2220 and_(object, Immediate(~kHeapObjectTagMask));
2221 movq(kScratchRegister, new_space_allocation_top);
2222#ifdef DEBUG
2223 cmpq(object, Operand(kScratchRegister, 0));
2224 Check(below, "Undo allocation of non allocated memory");
2225#endif
2226 movq(Operand(kScratchRegister, 0), object);
2227}
2228
2229
2230void MacroAssembler::AllocateHeapNumber(Register result,
2231 Register scratch,
2232 Label* gc_required) {
2233 // Allocate heap number in new space.
2234 AllocateInNewSpace(HeapNumber::kSize,
2235 result,
2236 scratch,
2237 no_reg,
2238 gc_required,
2239 TAG_OBJECT);
2240
2241 // Set the map.
2242 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2243 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2244}
2245
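// Illustrative use (not part of the original file; the registers and the
// gc_required label are assumptions of this sketch):
//   __ AllocateHeapNumber(rax, rbx, &gc_required);
//   // The map is already set; the caller still has to store the double into
//   // FieldOperand(rax, HeapNumber::kValueOffset).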
2246
2247void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2248 if (context_chain_length > 0) {
2249 // Move up the chain of contexts to the context containing the slot.
2250 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2251 // Load the function context (which is the incoming, outer context).
2252    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2253 for (int i = 1; i < context_chain_length; i++) {
2254 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2255 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2256 }
2257 // The context may be an intermediate context, not a function context.
2258 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2259 } else { // context is the current function context.
2260 // The context may be an intermediate context, not a function context.
2261 movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2262 }
2263}
2264
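// Illustrative use (not part of the original file; the chain length and the
// slot_index value are assumptions of this sketch):
//   __ LoadContext(rcx, 1);
//   __ movq(rax, Operand(rcx, Context::SlotOffset(slot_index)));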
2265
2266CodePatcher::CodePatcher(byte* address, int size)
2267 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2268 // Create a new macro assembler pointing to the address of the code to patch.
2269  // The size is adjusted with kGap in order for the assembler to generate size
2270 // bytes of instructions without failing with buffer size constraints.
2271 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2272}
2273
2274
2275CodePatcher::~CodePatcher() {
2276 // Indicate that code has changed.
2277 CPU::FlushICache(address_, size_);
2278
2279 // Check that the code was patched as expected.
2280 ASSERT(masm_.pc_ == address_ + size_);
2281 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2282}
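// Illustrative use (not part of the original file; the patch site and the
// masm() accessor are assumptions of this sketch).  The destructor asserts
// that exactly 'size' bytes were emitted:
//   CodePatcher patcher(break_site, 1);
//   patcher.masm()->int3();  // Overwrite one byte with a breakpoint.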
2283
2284} }  // namespace v8::internal