// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}

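// Helpers for reading values from the root list. The root list base is
// assumed to be kept in r13 for this port, so a root lives at
// r13 + index * kPointerSize; LoadRoot, PushRoot and CompareRoot below all
// rely on that layout.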
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(r13, index << kPointerSizeLog2));
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(r13, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(r13, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  ASSERT(is_int32(~Page::kPageAlignmentMask));
  masm->and_(object,
             Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask)));
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  masm->subq(addr, page_start);
  masm->shr(addr, Immediate(kPointerSizeLog2));
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
  masm->j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Load the array length into 'scratch'.
  masm->movl(scratch,
             Operand(page_start,
                     Page::kObjectStartOffset + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray), at
  //     page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  masm->lea(page_start,
            Operand(page_start, array_length, times_pointer_size,
                    Page::kObjectStartOffset + FixedArray::kHeaderSize
                        - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions
  masm->bind(&fast);
  masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}
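
// Worked example of the fast path above, assuming 8-byte pointers: a slot at
// page_start + 0x1000 gives pointer_offset = 0x1000 >> 3 = 0x200, so the
// final bts sets bit 0x200 of the remembered set that starts at
// page_start + Page::kRSetOffset.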


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits : public BitField<uint32_t, 0, 4> {};
  class AddressBits : public BitField<uint32_t, 4, 4> {};
  class ObjectBits : public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};
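
// Minor key layout sketch: with object in rax (code 0), addr in rcx (code 1)
// and scratch in rdx (code 2), MinorKey() yields
// (0 << 8) | (1 << 4) | 2 == 0x012, matching the OOOOAAAASSSS layout above.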


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the smi_index register contains the array index into
// the elements array represented as a smi. Otherwise it can be used as a
// scratch register.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register smi_index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !smi_index.is(rsi));

  // First, check if a remembered set write is even needed. The tests below
  // catch stores of smis and stores into the young generation (which does not
  // have space for the remembered set bits).
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, smi_index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, bit_cast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, bit_cast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, bit_cast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register smi_index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);
  }

  // Test that the object address is not in the new space. We cannot
  // set remembered set bits in the new space.
  movq(scratch, object);
  ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
  and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
  movq(kScratchRegister, ExternalReference::new_space_start());
  cmpq(scratch, kScratchRegister);
  j(equal, &done);

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'scratch'.
    lea(scratch, Operand(object, offset));
    ASSERT(is_int32(Page::kPageAlignmentMask));
    and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
    shr(scratch, Immediate(kObjectAlignmentBits));

    // Compute the page address from the heap object pointer, leave it in
    // 'object' (immediate value is sign extended).
    and_(object, Immediate(~Page::kPageAlignmentMask));

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions
    bts(Operand(object, Page::kRSetOffset), scratch);
  } else {
    Register dst = smi_index;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric.
      SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
      lea(dst, Operand(object,
                       index.reg,
                       index.scale,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, scratch);
    } else {
      RecordWriteStub stub(object, dst, scratch);
      CallStub(&stub);
    }
  }

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, bit_cast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, bit_cast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, bit_cast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
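
// Example of the encoding above, with the usual kSmiTagMask of 1: a msg
// pointer 0x7f0123456789 is split into p0 = 0x7f0123456788 (low tag bit
// cleared, so the GC treats it as a smi) and the difference p1 - p0 = 1,
// which is passed as a genuine smi and added back by the runtime to recover
// the original char*.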


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::TailCallRuntime(ExternalReference const& ext,
                                     int num_arguments,
                                     int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToRuntime(ext, result_size);
}


void MacroAssembler::JumpToRuntime(const ExternalReference& ext,
                                   int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  movq(target, code, RelocInfo::EMBEDDED_OBJECT);
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(intptr_t), flags, name };
    unresolved_.Add(entry);
  }
  addq(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
}

Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(rdx, FieldOperand(rdx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  movq(rdi, FieldOperand(rdx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xor_(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (x == 0) {
    xor_(kScratchRegister, kScratchRegister);
    movq(dst, kScratchRegister);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

static int kSmiShift = kSmiTagSize + kSmiShiftSize;
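
// With the 64-bit smi layout assumed throughout this file, kSmiShift works
// out to 32: a smi stores its signed 32-bit value in the upper half of the
// word, e.g. the smi 5 is the bit pattern 0x0000000500000000, and the low
// 32 bits (including the tag bit) are zero.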

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (src->value() == 0) {
    // Zero is the only smi whose tagged representation fits in a 32-bit
    // immediate.
    cmpq(dst, Immediate(0));
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}
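
// The rol-by-1 trick above moves the sign bit into bit 0 and the smi tag bit
// into bit 1, so a single testl against 0x03 checks "untagged and
// non-negative" in one instruction.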


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}



Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  andl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  cmpq(kScratchRegister, Immediate(1));
  return equal;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testq(src, Immediate(0x80000000));
  return zero;
}


void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}
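
// The cmpq in SmiNeg falls through (i.e. does not jump to on_smi_result) for
// exactly two inputs: zero, where JavaScript negation needs a heap-allocated
// -0 rather than a smi, and Smi::kMinValue, which negates to itself and so
// has no smi representation.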


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    addq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    subq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}
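
// Adding the tagged values directly is sound because the payload sits in the
// upper 32 bits: (a << 32) + (b << 32) equals (a + b) << 32, and the CPU
// overflow flag fires exactly when a + b leaves the 32-bit smi range.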


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other one
    // is negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  Move(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    addq(dst, src);
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
    Label result_ok;
    j(no_overflow, &result_ok);
    subq(dst, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&result_ok);
  } else {
    Move(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
  } else {
    // Subtract by adding the negative, to do it in two operations.
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
    } else {
      Move(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
    Label sub_success;
    j(no_overflow, &sub_success);
    addq(src, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&sub_success);
  } else {
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
      j(overflow, on_not_smi_result);
    } else {
      Move(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    and_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    or_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    xor_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
                                                  Register src,
                                                  int shift_value,
                                                  Label* on_not_smi_result) {
  // Logic right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value,
                                          Label* on_not_smi_result) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smi_result) {
  ASSERT(!dst.is(rcx));
  Label result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // The shift amount is specified by the lower 5 bits, not six as for the
  // shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  Label result_ok;
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift is rcx modulo 0x1f + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero then both are smis.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then the scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}

SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}
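
// For instance, assuming kSmiShift is 32 as noted earlier,
// SmiToIndex(reg, reg, kPointerSizeLog2) turns a tagged smi n (stored as
// n << 32) into n << 3, i.e. a byte offset for pointer-sized elements;
// RecordWriteNonSmi uses it exactly this way for array stores.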

SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
  ASSERT_EQ(0, kSmiTag);
  Condition smi = CheckSmi(src);
  j(smi, on_smi);
}


void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi);
}


void MacroAssembler::JumpIfNotPositiveSmi(Register src,
                                          Label* on_not_positive_smi) {
  Condition positive_smi = CheckPositiveSmi(src);
  j(NegateCondition(positive_smi), on_not_positive_smi);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals) {
  SmiCompare(src, constant);
  j(equal, on_equals);
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
                                      Label* on_not_both_smi) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
                                              Label* on_not_both_smi) {
  Condition both_smi = CheckBothPositiveSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}



void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                                         Register second_object,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* on_fail) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}
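
// The lea above computes scratch1 + (scratch2 << 3); since
// kFlatAsciiStringMask does not overlap itself shifted left by 3 (the
// ASSERT_EQ checks this), both masked instance types can be compared against
// the combined constant with a single cmpl.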


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Set(kScratchRegister, smi);
    push(kScratchRegister);
  }
}
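
// With the smi layout used here, the Immediate fast path above is effectively
// only taken for the zero smi: any non-zero value shifted into the upper
// 32 bits no longer fits in a 32-bit immediate.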
1457
1458
Leon Clarkee46be812010-01-19 14:06:41 +00001459void MacroAssembler::Drop(int stack_elements) {
1460 if (stack_elements > 0) {
1461 addq(rsp, Immediate(stack_elements * kPointerSize));
1462 }
1463}
1464
1465
Steve Block3ce2e202009-11-05 08:53:23 +00001466void MacroAssembler::Test(const Operand& src, Smi* source) {
1467 intptr_t smi = reinterpret_cast<intptr_t>(source);
1468 if (is_int32(smi)) {
1469 testl(src, Immediate(static_cast<int32_t>(smi)));
1470 } else {
1471 Move(kScratchRegister, source);
1472 testq(src, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00001473 }
1474}
1475
1476
1477void MacroAssembler::Jump(ExternalReference ext) {
1478 movq(kScratchRegister, ext);
1479 jmp(kScratchRegister);
1480}
1481
1482
1483void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1484 movq(kScratchRegister, destination, rmode);
1485 jmp(kScratchRegister);
1486}
1487
1488
1489void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
Steve Block3ce2e202009-11-05 08:53:23 +00001490 // TODO(X64): Inline this
1491 jmp(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001492}
1493
1494
1495void MacroAssembler::Call(ExternalReference ext) {
1496 movq(kScratchRegister, ext);
1497 call(kScratchRegister);
1498}
1499
1500
1501void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1502 movq(kScratchRegister, destination, rmode);
1503 call(kScratchRegister);
1504}
1505
1506
1507void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1508 ASSERT(RelocInfo::IsCodeTarget(rmode));
1509 WriteRecordedPositions();
Steve Block3ce2e202009-11-05 08:53:23 +00001510 call(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001511}
1512
1513
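// Pushes a stack handler frame (state, frame pointer, next handler) on top of
// the return address already on the stack and links it into the handler chain
// at Top::k_handler_address. PopTryHandler below unlinks it again.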
1514void MacroAssembler::PushTryHandler(CodeLocation try_location,
1515 HandlerType type) {
1516 // Adjust this code if not the case.
1517 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1518
1519 // The pc (return address) is already on TOS. This code pushes state,
1520 // frame pointer and current handler. Check that they are expected
1521 // next on the stack, in that order.
1522 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1523 StackHandlerConstants::kPCOffset - kPointerSize);
1524 ASSERT_EQ(StackHandlerConstants::kFPOffset,
1525 StackHandlerConstants::kStateOffset - kPointerSize);
1526 ASSERT_EQ(StackHandlerConstants::kNextOffset,
1527 StackHandlerConstants::kFPOffset - kPointerSize);
1528
1529 if (try_location == IN_JAVASCRIPT) {
1530 if (type == TRY_CATCH_HANDLER) {
1531 push(Immediate(StackHandler::TRY_CATCH));
1532 } else {
1533 push(Immediate(StackHandler::TRY_FINALLY));
1534 }
1535 push(rbp);
1536 } else {
1537 ASSERT(try_location == IN_JS_ENTRY);
1538 // The frame pointer does not point to a JS frame so we save NULL
1539 // for rbp. We expect the code throwing an exception to check rbp
1540 // before dereferencing it to restore the context.
1541 push(Immediate(StackHandler::ENTRY));
1542 push(Immediate(0)); // NULL frame pointer.
1543 }
1544 // Save the current handler.
1545 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1546 push(Operand(kScratchRegister, 0));
1547 // Link this handler.
1548 movq(Operand(kScratchRegister, 0), rsp);
1549}
1550
1551
Leon Clarkee46be812010-01-19 14:06:41 +00001552void MacroAssembler::PopTryHandler() {
1553 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1554 // Unlink this handler.
1555 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1556 pop(Operand(kScratchRegister, 0));
1557 // Remove the remaining fields.
1558 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1559}
1560
1561
Steve Blocka7e24c12009-10-30 11:49:00 +00001562void MacroAssembler::Ret() {
1563 ret(0);
1564}
1565
1566
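// Compares the two values on top of the x87 register stack and pops both,
// leaving the result in EFLAGS.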
1567void MacroAssembler::FCmp() {
Steve Block3ce2e202009-11-05 08:53:23 +00001568 fucomip();
1569 ffree(0);
1570 fincstp();
Steve Blocka7e24c12009-10-30 11:49:00 +00001571}
1572
1573
1574void MacroAssembler::CmpObjectType(Register heap_object,
1575 InstanceType type,
1576 Register map) {
1577 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1578 CmpInstanceType(map, type);
1579}
1580
1581
1582void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1583 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1584 Immediate(static_cast<int8_t>(type)));
1585}
1586
1587
Andrei Popescu31002712010-02-23 13:46:05 +00001588void MacroAssembler::CheckMap(Register obj,
1589 Handle<Map> map,
1590 Label* fail,
1591 bool is_heap_object) {
1592 if (!is_heap_object) {
1593 JumpIfSmi(obj, fail);
1594 }
1595 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1596 j(not_equal, fail);
1597}
1598
1599
Leon Clarked91b9f72010-01-27 17:25:45 +00001600Condition MacroAssembler::IsObjectStringType(Register heap_object,
1601 Register map,
1602 Register instance_type) {
1603 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00001604 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00001605 ASSERT(kNotStringTag != 0);
1606 testb(instance_type, Immediate(kIsNotStringMask));
1607 return zero;
1608}
1609
1610
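// Loads the prototype of a JS function into |result|, jumping to |miss| when
// |function| is not a function or its prototype is still the hole. For
// functions with a non-instance prototype, the value is fetched from the
// constructor field of the function's map.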
Steve Blocka7e24c12009-10-30 11:49:00 +00001611void MacroAssembler::TryGetFunctionPrototype(Register function,
1612 Register result,
1613 Label* miss) {
1614 // Check that the function isn't a smi.
1615 testl(function, Immediate(kSmiTagMask));
1616 j(zero, miss);
1617
1618 // Check that the function really is a function.
1619 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1620 j(not_equal, miss);
1621
1622 // Make sure that the function has an instance prototype.
1623 Label non_instance;
1624 testb(FieldOperand(result, Map::kBitFieldOffset),
1625 Immediate(1 << Map::kHasNonInstancePrototype));
1626 j(not_zero, &non_instance);
1627
1628 // Get the prototype or initial map from the function.
1629 movq(result,
1630 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1631
1632 // If the prototype or initial map is the hole, don't return it and
1633 // simply miss the cache instead. This will allow us to allocate a
1634 // prototype object on-demand in the runtime system.
1635 CompareRoot(result, Heap::kTheHoleValueRootIndex);
1636 j(equal, miss);
1637
1638 // If the function does not have an initial map, we're done.
1639 Label done;
1640 CmpObjectType(result, MAP_TYPE, kScratchRegister);
1641 j(not_equal, &done);
1642
1643 // Get the prototype from the initial map.
1644 movq(result, FieldOperand(result, Map::kPrototypeOffset));
1645 jmp(&done);
1646
1647 // Non-instance prototype: Fetch prototype from constructor field
1648 // in initial map.
1649 bind(&non_instance);
1650 movq(result, FieldOperand(result, Map::kConstructorOffset));
1651
1652 // All done.
1653 bind(&done);
1654}
1655
1656
1657void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1658 if (FLAG_native_code_counters && counter->Enabled()) {
1659 movq(kScratchRegister, ExternalReference(counter));
1660 movl(Operand(kScratchRegister, 0), Immediate(value));
1661 }
1662}
1663
1664
1665void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1666 ASSERT(value > 0);
1667 if (FLAG_native_code_counters && counter->Enabled()) {
1668 movq(kScratchRegister, ExternalReference(counter));
1669 Operand operand(kScratchRegister, 0);
1670 if (value == 1) {
1671 incl(operand);
1672 } else {
1673 addl(operand, Immediate(value));
1674 }
1675 }
1676}
1677
1678
1679void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1680 ASSERT(value > 0);
1681 if (FLAG_native_code_counters && counter->Enabled()) {
1682 movq(kScratchRegister, ExternalReference(counter));
1683 Operand operand(kScratchRegister, 0);
1684 if (value == 1) {
1685 decl(operand);
1686 } else {
1687 subl(operand, Immediate(value));
1688 }
1689 }
1690}
1691
Steve Blocka7e24c12009-10-30 11:49:00 +00001692#ifdef ENABLE_DEBUGGER_SUPPORT
1693
1694void MacroAssembler::PushRegistersFromMemory(RegList regs) {
1695 ASSERT((regs & ~kJSCallerSaved) == 0);
1696 // Push the content of the memory location to the stack.
1697 for (int i = 0; i < kNumJSCallerSaved; i++) {
1698 int r = JSCallerSavedCode(i);
1699 if ((regs & (1 << r)) != 0) {
1700 ExternalReference reg_addr =
1701 ExternalReference(Debug_Address::Register(i));
1702 movq(kScratchRegister, reg_addr);
1703 push(Operand(kScratchRegister, 0));
1704 }
1705 }
1706}
1707
Steve Block3ce2e202009-11-05 08:53:23 +00001708
Steve Blocka7e24c12009-10-30 11:49:00 +00001709void MacroAssembler::SaveRegistersToMemory(RegList regs) {
1710 ASSERT((regs & ~kJSCallerSaved) == 0);
1711 // Copy the content of registers to memory location.
1712 for (int i = 0; i < kNumJSCallerSaved; i++) {
1713 int r = JSCallerSavedCode(i);
1714 if ((regs & (1 << r)) != 0) {
1715 Register reg = { r };
1716 ExternalReference reg_addr =
1717 ExternalReference(Debug_Address::Register(i));
1718 movq(kScratchRegister, reg_addr);
1719 movq(Operand(kScratchRegister, 0), reg);
1720 }
1721 }
1722}
1723
1724
1725void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
1726 ASSERT((regs & ~kJSCallerSaved) == 0);
1727 // Copy the content of memory location to registers.
1728 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1729 int r = JSCallerSavedCode(i);
1730 if ((regs & (1 << r)) != 0) {
1731 Register reg = { r };
1732 ExternalReference reg_addr =
1733 ExternalReference(Debug_Address::Register(i));
1734 movq(kScratchRegister, reg_addr);
1735 movq(reg, Operand(kScratchRegister, 0));
1736 }
1737 }
1738}
1739
1740
1741void MacroAssembler::PopRegistersToMemory(RegList regs) {
1742 ASSERT((regs & ~kJSCallerSaved) == 0);
1743 // Pop the content from the stack to the memory location.
1744 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1745 int r = JSCallerSavedCode(i);
1746 if ((regs & (1 << r)) != 0) {
1747 ExternalReference reg_addr =
1748 ExternalReference(Debug_Address::Register(i));
1749 movq(kScratchRegister, reg_addr);
1750 pop(Operand(kScratchRegister, 0));
1751 }
1752 }
1753}
1754
1755
1756void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
1757 Register scratch,
1758 RegList regs) {
1759 ASSERT(!scratch.is(kScratchRegister));
1760 ASSERT(!base.is(kScratchRegister));
1761 ASSERT(!base.is(scratch));
1762 ASSERT((regs & ~kJSCallerSaved) == 0);
1763 // Copy the content of the stack to the memory location and adjust base.
1764 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1765 int r = JSCallerSavedCode(i);
1766 if ((regs & (1 << r)) != 0) {
1767 movq(scratch, Operand(base, 0));
1768 ExternalReference reg_addr =
1769 ExternalReference(Debug_Address::Register(i));
1770 movq(kScratchRegister, reg_addr);
1771 movq(Operand(kScratchRegister, 0), scratch);
1772 lea(base, Operand(base, kPointerSize));
1773 }
1774 }
1775}
1776
1777#endif // ENABLE_DEBUGGER_SUPPORT
1778
1779
1780void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
1781 bool resolved;
1782 Handle<Code> code = ResolveBuiltin(id, &resolved);
1783
1784 // Calls are not allowed in some stubs.
1785 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
1786
1787 // Rely on the assertion to check that the number of provided
1788 // arguments matches the expected number of arguments. Fake a
1789 // parameter count to avoid emitting code to do the check.
1790 ParameterCount expected(0);
Steve Block3ce2e202009-11-05 08:53:23 +00001791 InvokeCode(Handle<Code>(code),
1792 expected,
1793 expected,
1794 RelocInfo::CODE_TARGET,
1795 flag);
Steve Blocka7e24c12009-10-30 11:49:00 +00001796
1797 const char* name = Builtins::GetName(id);
1798 int argc = Builtins::GetArgumentsCount(id);
1799 // The target address for the call is stored as an immediate at offset
1800 // kCallTargetAddressOffset from the end of the emitted call instruction.
1801 if (!resolved) {
1802 uint32_t flags =
1803 Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
Steve Blocka7e24c12009-10-30 11:49:00 +00001804 Bootstrapper::FixupFlagsUseCodeObject::encode(false);
1805 Unresolved entry =
1806 { pc_offset() - kCallTargetAddressOffset, flags, name };
1807 unresolved_.Add(entry);
1808 }
1809}
1810
1811
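// Shared prologue for the InvokeCode/InvokeFunction variants: materializes the
// actual argument count in rax and the expected count in rbx, and calls or
// jumps to the arguments adaptor trampoline when the counts cannot be shown to
// match.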
1812void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1813 const ParameterCount& actual,
1814 Handle<Code> code_constant,
1815 Register code_register,
1816 Label* done,
1817 InvokeFlag flag) {
1818 bool definitely_matches = false;
1819 Label invoke;
1820 if (expected.is_immediate()) {
1821 ASSERT(actual.is_immediate());
1822 if (expected.immediate() == actual.immediate()) {
1823 definitely_matches = true;
1824 } else {
1825 movq(rax, Immediate(actual.immediate()));
1826 if (expected.immediate() ==
Steve Block3ce2e202009-11-05 08:53:23 +00001827 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001828 // Don't worry about adapting arguments for built-ins that
1829 // don't want that done. Skip adaptation code by making it look
1830 // like we have a match between expected and actual number of
1831 // arguments.
1832 definitely_matches = true;
1833 } else {
1834 movq(rbx, Immediate(expected.immediate()));
1835 }
1836 }
1837 } else {
1838 if (actual.is_immediate()) {
1839 // Expected is in register, actual is immediate. This is the
1840 // case when we invoke function values without going through the
1841 // IC mechanism.
1842 cmpq(expected.reg(), Immediate(actual.immediate()));
1843 j(equal, &invoke);
1844 ASSERT(expected.reg().is(rbx));
1845 movq(rax, Immediate(actual.immediate()));
1846 } else if (!expected.reg().is(actual.reg())) {
1847 // Both expected and actual are in (different) registers. This
1848 // is the case when we invoke functions using call and apply.
1849 cmpq(expected.reg(), actual.reg());
1850 j(equal, &invoke);
1851 ASSERT(actual.reg().is(rax));
1852 ASSERT(expected.reg().is(rbx));
1853 }
1854 }
1855
1856 if (!definitely_matches) {
1857 Handle<Code> adaptor =
1858 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
1859 if (!code_constant.is_null()) {
1860 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
1861 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1862 } else if (!code_register.is(rdx)) {
1863 movq(rdx, code_register);
1864 }
1865
1866 if (flag == CALL_FUNCTION) {
1867 Call(adaptor, RelocInfo::CODE_TARGET);
1868 jmp(done);
1869 } else {
1870 Jump(adaptor, RelocInfo::CODE_TARGET);
1871 }
1872 bind(&invoke);
1873 }
1874}
1875
1876
1877void MacroAssembler::InvokeCode(Register code,
1878 const ParameterCount& expected,
1879 const ParameterCount& actual,
1880 InvokeFlag flag) {
1881 Label done;
1882 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1883 if (flag == CALL_FUNCTION) {
1884 call(code);
1885 } else {
1886 ASSERT(flag == JUMP_FUNCTION);
1887 jmp(code);
1888 }
1889 bind(&done);
1890}
1891
1892
1893void MacroAssembler::InvokeCode(Handle<Code> code,
1894 const ParameterCount& expected,
1895 const ParameterCount& actual,
1896 RelocInfo::Mode rmode,
1897 InvokeFlag flag) {
1898 Label done;
1899 Register dummy = rax;
1900 InvokePrologue(expected, actual, code, dummy, &done, flag);
1901 if (flag == CALL_FUNCTION) {
1902 Call(code, rmode);
1903 } else {
1904 ASSERT(flag == JUMP_FUNCTION);
1905 Jump(code, rmode);
1906 }
1907 bind(&done);
1908}
1909
1910
1911void MacroAssembler::InvokeFunction(Register function,
1912 const ParameterCount& actual,
1913 InvokeFlag flag) {
1914 ASSERT(function.is(rdi));
1915 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1916 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
1917 movsxlq(rbx,
1918 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
1919 movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
1920 // Advances rdx to the end of the Code object header, to the start of
1921 // the executable code.
1922 lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
1923
1924 ParameterCount expected(rbx);
1925 InvokeCode(rdx, expected, actual, flag);
1926}
1927
1928
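// Builds a frame of the given type: pushes rbp, the context, a smi-tagged
// frame type marker and the code object, in that order.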
1929void MacroAssembler::EnterFrame(StackFrame::Type type) {
1930 push(rbp);
1931 movq(rbp, rsp);
1932 push(rsi); // Context.
Steve Block3ce2e202009-11-05 08:53:23 +00001933 Push(Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00001934 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
1935 push(kScratchRegister);
1936 if (FLAG_debug_code) {
1937 movq(kScratchRegister,
1938 Factory::undefined_value(),
1939 RelocInfo::EMBEDDED_OBJECT);
1940 cmpq(Operand(rsp, 0), kScratchRegister);
1941 Check(not_equal, "code object not properly patched");
1942 }
1943}
1944
1945
1946void MacroAssembler::LeaveFrame(StackFrame::Type type) {
1947 if (FLAG_debug_code) {
Steve Block3ce2e202009-11-05 08:53:23 +00001948 Move(kScratchRegister, Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00001949 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
1950 Check(equal, "stack frame types must match");
1951 }
1952 movq(rsp, rbp);
1953 pop(rbp);
1954}
1955
1956
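// Sets up an exit frame for calling out to C code: records rbp and the context
// in Top, pushes the code object (or 0 in debug mode), computes argv in r15,
// reserves result/argument space on Win64 and aligns rsp.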
Steve Blockd0582a62009-12-15 09:54:21 +00001957void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001958 // Setup the frame structure on the stack.
1959 // All constants are relative to the frame pointer of the exit frame.
1960 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
1961 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
1962 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
1963 push(rbp);
1964 movq(rbp, rsp);
1965
1966 // Reserve room for entry stack pointer and push the debug marker.
Steve Block3ce2e202009-11-05 08:53:23 +00001967 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +00001968 push(Immediate(0)); // saved entry sp, patched before call
Steve Blockd0582a62009-12-15 09:54:21 +00001969 if (mode == ExitFrame::MODE_DEBUG) {
1970 push(Immediate(0));
1971 } else {
1972 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
1973 push(kScratchRegister);
1974 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001975
1976 // Save the frame pointer and the context in top.
1977 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
1978 ExternalReference context_address(Top::k_context_address);
1979 movq(r14, rax); // Backup rax before we use it.
1980
1981 movq(rax, rbp);
1982 store_rax(c_entry_fp_address);
1983 movq(rax, rsi);
1984 store_rax(context_address);
1985
1986 // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
1987 // so it must be retained across the C-call.
1988 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
1989 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
1990
1991#ifdef ENABLE_DEBUGGER_SUPPORT
1992 // Save the state of all registers to the stack from the memory
1993 // location. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00001994 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001995 // TODO(1243899): This should be symmetric to
1996 // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
1997 // correct here, but computed for the other call. Very error
1998 // prone! FIX THIS. Actually there are deeper problems with
1999 // register saving than this asymmetry (see the bug report
2000 // associated with this issue).
2001 PushRegistersFromMemory(kJSCallerSaved);
2002 }
2003#endif
2004
2005#ifdef _WIN64
2006 // Reserve space on stack for result and argument structures, if necessary.
2007 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
2008 // Reserve space for the Arguments object. The Windows 64-bit ABI
2009 // requires us to pass this structure as a pointer to its location on
2010 // the stack. The structure contains 2 values.
2011 int argument_stack_space = 2 * kPointerSize;
2012 // We also need backing space for 4 parameters, even though
2013 // we only pass one or two parameters, and they are passed in registers.
2014 int argument_mirror_space = 4 * kPointerSize;
2015 int total_stack_space =
2016 argument_mirror_space + argument_stack_space + result_stack_space;
2017 subq(rsp, Immediate(total_stack_space));
2018#endif
2019
2020 // Get the required frame alignment for the OS.
2021 static const int kFrameAlignment = OS::ActivationFrameAlignment();
2022 if (kFrameAlignment > 0) {
2023 ASSERT(IsPowerOf2(kFrameAlignment));
2024 movq(kScratchRegister, Immediate(-kFrameAlignment));
2025 and_(rsp, kScratchRegister);
2026 }
2027
2028 // Patch the saved entry sp.
2029 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2030}
2031
2032
Steve Blockd0582a62009-12-15 09:54:21 +00002033void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002034 // Registers:
2035 // r15 : argv
2036#ifdef ENABLE_DEBUGGER_SUPPORT
2037 // Restore the memory copy of the registers by digging them out from
2038 // the stack. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00002039 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002040 // It's okay to clobber register rbx below because we don't need
2041 // the function pointer after this.
2042 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
Steve Blockd0582a62009-12-15 09:54:21 +00002043 const int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002044 lea(rbx, Operand(rbp, kOffset));
2045 CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
2046 }
2047#endif
2048
2049 // Get the return address from the stack and restore the frame pointer.
2050 movq(rcx, Operand(rbp, 1 * kPointerSize));
2051 movq(rbp, Operand(rbp, 0 * kPointerSize));
2052
Steve Blocka7e24c12009-10-30 11:49:00 +00002053 // Pop everything up to and including the arguments and the receiver
2054 // from the caller stack.
2055 lea(rsp, Operand(r15, 1 * kPointerSize));
2056
2057 // Restore current context from top and clear it in debug mode.
2058 ExternalReference context_address(Top::k_context_address);
2059 movq(kScratchRegister, context_address);
2060 movq(rsi, Operand(kScratchRegister, 0));
2061#ifdef DEBUG
2062 movq(Operand(kScratchRegister, 0), Immediate(0));
2063#endif
2064
2065 // Push the return address to get ready to return.
2066 push(rcx);
2067
2068 // Clear the top frame.
2069 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2070 movq(kScratchRegister, c_entry_fp_address);
2071 movq(Operand(kScratchRegister, 0), Immediate(0));
2072}
2073
2074
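// Walks the prototype chain from |object| to |holder|, checking the map of
// each object against its expected map and performing global proxy access
// checks along the way. Returns the register that holds the holder at the end.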
Steve Block3ce2e202009-11-05 08:53:23 +00002075Register MacroAssembler::CheckMaps(JSObject* object,
2076 Register object_reg,
2077 JSObject* holder,
2078 Register holder_reg,
Steve Blocka7e24c12009-10-30 11:49:00 +00002079 Register scratch,
2080 Label* miss) {
2081 // Make sure there's no overlap between scratch and the other
2082 // registers.
2083 ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));
2084
2085 // Keep track of the current object in register reg. On the first
2086 // iteration, reg is an alias for object_reg; on later iterations,
2087 // it is an alias for holder_reg.
2088 Register reg = object_reg;
2089 int depth = 1;
2090
2091 // Check the maps in the prototype chain.
2092 // Traverse the prototype chain from the object and do map checks.
2093 while (object != holder) {
2094 depth++;
2095
2096 // Only global objects and objects that do not require access
2097 // checks are allowed in stubs.
2098 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2099
2100 JSObject* prototype = JSObject::cast(object->GetPrototype());
2101 if (Heap::InNewSpace(prototype)) {
2102 // Get the map of the current object.
2103 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2104 Cmp(scratch, Handle<Map>(object->map()));
2105 // Branch on the result of the map check.
2106 j(not_equal, miss);
2107 // Check access rights to the global object. This has to happen
2108 // after the map check so that we know that the object is
2109 // actually a global object.
2110 if (object->IsJSGlobalProxy()) {
2111 CheckAccessGlobalProxy(reg, scratch, miss);
2112
2113 // Restore scratch register to be the map of the object.
2114 // We load the prototype from the map in the scratch register.
2115 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2116 }
2117 // The prototype is in new space; we cannot store a reference
2118 // to it in the code. Load it from the map.
2119 reg = holder_reg; // from now the object is in holder_reg
2120 movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));
2121
2122 } else {
2123 // Check the map of the current object.
2124 Cmp(FieldOperand(reg, HeapObject::kMapOffset),
2125 Handle<Map>(object->map()));
2126 // Branch on the result of the map check.
2127 j(not_equal, miss);
2128 // Check access rights to the global object. This has to happen
2129 // after the map check so that we know that the object is
2130 // actually a global object.
2131 if (object->IsJSGlobalProxy()) {
2132 CheckAccessGlobalProxy(reg, scratch, miss);
2133 }
2134 // The prototype is in old space; load it directly.
2135 reg = holder_reg; // from now the object is in holder_reg
2136 Move(reg, Handle<JSObject>(prototype));
2137 }
2138
2139 // Go to the next object in the prototype chain.
2140 object = prototype;
2141 }
2142
2143 // Check the holder map.
Steve Block3ce2e202009-11-05 08:53:23 +00002144 Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002145 j(not_equal, miss);
2146
2147 // Log the check depth.
2148 LOG(IntEvent("check-maps-depth", depth));
2149
2150 // Perform security check for access to the global object and return
2151 // the holder register.
2152 ASSERT(object == holder);
2153 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2154 if (object->IsJSGlobalProxy()) {
2155 CheckAccessGlobalProxy(reg, scratch, miss);
2156 }
2157 return reg;
2158}
2159
2160
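// Verifies that the calling context and the receiving global proxy carry the
// same security token, jumping to |miss| when access should be denied.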
Steve Blocka7e24c12009-10-30 11:49:00 +00002161void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2162 Register scratch,
2163 Label* miss) {
2164 Label same_contexts;
2165
2166 ASSERT(!holder_reg.is(scratch));
2167 ASSERT(!scratch.is(kScratchRegister));
2168 // Load current lexical context from the stack frame.
2169 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2170
2171 // When generating debug code, make sure the lexical context is set.
2172 if (FLAG_debug_code) {
2173 cmpq(scratch, Immediate(0));
2174 Check(not_equal, "we should not have an empty lexical context");
2175 }
2176 // Load the global context of the current context.
2177 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2178 movq(scratch, FieldOperand(scratch, offset));
2179 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2180
2181 // Check the context is a global context.
2182 if (FLAG_debug_code) {
2183 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2184 Factory::global_context_map());
2185 Check(equal, "JSGlobalObject::global_context should be a global context.");
2186 }
2187
2188 // Check if both contexts are the same.
2189 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2190 j(equal, &same_contexts);
2191
2192 // Compare security tokens.
2193 // Check that the security token in the calling global object is
2194 // compatible with the security token in the receiving global
2195 // object.
2196
2197 // Check the context is a global context.
2198 if (FLAG_debug_code) {
2199 // Preserve original value of holder_reg.
2200 push(holder_reg);
2201 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2202 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2203 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2204
2205 // Read the first word and compare it to the global_context_map.
2206 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2207 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2208 Check(equal, "JSGlobalObject::global_context should be a global context.");
2209 pop(holder_reg);
2210 }
2211
2212 movq(kScratchRegister,
2213 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00002214 int token_offset =
2215 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002216 movq(scratch, FieldOperand(scratch, token_offset));
2217 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2218 j(not_equal, miss);
2219
2220 bind(&same_contexts);
2221}
2222
2223
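// Loads the current new-space allocation top into |result|, or, when the
// caller already has it (RESULT_CONTAINS_TOP), merely verifies it in debug
// builds.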
2224void MacroAssembler::LoadAllocationTopHelper(Register result,
2225 Register result_end,
2226 Register scratch,
2227 AllocationFlags flags) {
2228 ExternalReference new_space_allocation_top =
2229 ExternalReference::new_space_allocation_top_address();
2230
2231 // Just return if allocation top is already known.
2232 if ((flags & RESULT_CONTAINS_TOP) != 0) {
2233 // No use of scratch if allocation top is provided.
2234 ASSERT(scratch.is(no_reg));
2235#ifdef DEBUG
2236 // Assert that result actually contains top on entry.
2237 movq(kScratchRegister, new_space_allocation_top);
2238 cmpq(result, Operand(kScratchRegister, 0));
2239 Check(equal, "Unexpected allocation top");
2240#endif
2241 return;
2242 }
2243
2244 // Move address of new object to result. Use scratch register if available.
2245 if (scratch.is(no_reg)) {
2246 movq(kScratchRegister, new_space_allocation_top);
2247 movq(result, Operand(kScratchRegister, 0));
2248 } else {
2249 ASSERT(!scratch.is(result_end));
2250 movq(scratch, new_space_allocation_top);
2251 movq(result, Operand(scratch, 0));
2252 }
2253}
2254
2255
2256void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2257 Register scratch) {
Steve Blockd0582a62009-12-15 09:54:21 +00002258 if (FLAG_debug_code) {
2259 testq(result_end, Immediate(kObjectAlignmentMask));
2260 Check(zero, "Unaligned allocation in new space");
2261 }
2262
Steve Blocka7e24c12009-10-30 11:49:00 +00002263 ExternalReference new_space_allocation_top =
2264 ExternalReference::new_space_allocation_top_address();
2265
2266 // Update new top.
2267 if (result_end.is(rax)) {
2268 // rax can be stored directly to a memory location.
2269 store_rax(new_space_allocation_top);
2270 } else {
2271 // Register required - use scratch provided if available.
2272 if (scratch.is(no_reg)) {
2273 movq(kScratchRegister, new_space_allocation_top);
2274 movq(Operand(kScratchRegister, 0), result_end);
2275 } else {
2276 movq(Operand(scratch, 0), result_end);
2277 }
2278 }
2279}
2280
2281
2282void MacroAssembler::AllocateInNewSpace(int object_size,
2283 Register result,
2284 Register result_end,
2285 Register scratch,
2286 Label* gc_required,
2287 AllocationFlags flags) {
2288 ASSERT(!result.is(result_end));
2289
2290 // Load address of new object into result.
2291 LoadAllocationTopHelper(result, result_end, scratch, flags);
2292
2293 // Calculate new top and bail out if new space is exhausted.
2294 ExternalReference new_space_allocation_limit =
2295 ExternalReference::new_space_allocation_limit_address();
2296 lea(result_end, Operand(result, object_size));
2297 movq(kScratchRegister, new_space_allocation_limit);
2298 cmpq(result_end, Operand(kScratchRegister, 0));
2299 j(above, gc_required);
2300
2301 // Update allocation top.
2302 UpdateAllocationTopHelper(result_end, scratch);
2303
2304 // Tag the result if requested.
2305 if ((flags & TAG_OBJECT) != 0) {
2306 addq(result, Immediate(kHeapObjectTag));
2307 }
2308}
2309
2310
2311void MacroAssembler::AllocateInNewSpace(int header_size,
2312 ScaleFactor element_size,
2313 Register element_count,
2314 Register result,
2315 Register result_end,
2316 Register scratch,
2317 Label* gc_required,
2318 AllocationFlags flags) {
2319 ASSERT(!result.is(result_end));
2320
2321 // Load address of new object into result.
2322 LoadAllocationTopHelper(result, result_end, scratch, flags);
2323
2324 // Calculate new top and bail out if new space is exhausted.
2325 ExternalReference new_space_allocation_limit =
2326 ExternalReference::new_space_allocation_limit_address();
2327 lea(result_end, Operand(result, element_count, element_size, header_size));
2328 movq(kScratchRegister, new_space_allocation_limit);
2329 cmpq(result_end, Operand(kScratchRegister, 0));
2330 j(above, gc_required);
2331
2332 // Update allocation top.
2333 UpdateAllocationTopHelper(result_end, scratch);
2334
2335 // Tag the result if requested.
2336 if ((flags & TAG_OBJECT) != 0) {
2337 addq(result, Immediate(kHeapObjectTag));
2338 }
2339}
2340
2341
2342void MacroAssembler::AllocateInNewSpace(Register object_size,
2343 Register result,
2344 Register result_end,
2345 Register scratch,
2346 Label* gc_required,
2347 AllocationFlags flags) {
2348 // Load address of new object into result.
2349 LoadAllocationTopHelper(result, result_end, scratch, flags);
2350
2351 // Calculate new top and bail out if new space is exhausted.
2352 ExternalReference new_space_allocation_limit =
2353 ExternalReference::new_space_allocation_limit_address();
2354 if (!object_size.is(result_end)) {
2355 movq(result_end, object_size);
2356 }
2357 addq(result_end, result);
2358 movq(kScratchRegister, new_space_allocation_limit);
2359 cmpq(result_end, Operand(kScratchRegister, 0));
2360 j(above, gc_required);
2361
2362 // Update allocation top.
2363 UpdateAllocationTopHelper(result_end, scratch);
2364
2365 // Tag the result if requested.
2366 if ((flags & TAG_OBJECT) != 0) {
2367 addq(result, Immediate(kHeapObjectTag));
2368 }
2369}
2370
2371
2372void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2373 ExternalReference new_space_allocation_top =
2374 ExternalReference::new_space_allocation_top_address();
2375
2376 // Make sure the object has no tag before resetting top.
2377 and_(object, Immediate(~kHeapObjectTagMask));
2378 movq(kScratchRegister, new_space_allocation_top);
2379#ifdef DEBUG
2380 cmpq(object, Operand(kScratchRegister, 0));
2381 Check(below, "Undo allocation of non allocated memory");
2382#endif
2383 movq(Operand(kScratchRegister, 0), object);
2384}
2385
2386
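// Allocates a HeapNumber in new space and installs its map; the value field is
// left for the caller to initialize.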
Steve Block3ce2e202009-11-05 08:53:23 +00002387void MacroAssembler::AllocateHeapNumber(Register result,
2388 Register scratch,
2389 Label* gc_required) {
2390 // Allocate heap number in new space.
2391 AllocateInNewSpace(HeapNumber::kSize,
2392 result,
2393 scratch,
2394 no_reg,
2395 gc_required,
2396 TAG_OBJECT);
2397
2398 // Set the map.
2399 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2400 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2401}
2402
2403
Leon Clarkee46be812010-01-19 14:06:41 +00002404void MacroAssembler::AllocateTwoByteString(Register result,
2405 Register length,
2406 Register scratch1,
2407 Register scratch2,
2408 Register scratch3,
2409 Label* gc_required) {
2410 // Calculate the number of bytes needed for the characters in the string while
2411 // observing object alignment.
2412 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2413 ASSERT(kShortSize == 2);
2414 // scratch1 = length * 2 + kObjectAlignmentMask.
2415 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
2416 and_(scratch1, Immediate(~kObjectAlignmentMask));
2417
2418 // Allocate two byte string in new space.
2419 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2420 times_1,
2421 scratch1,
2422 result,
2423 scratch2,
2424 scratch3,
2425 gc_required,
2426 TAG_OBJECT);
2427
2428 // Set the map, length and hash field.
2429 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2430 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2431 movl(FieldOperand(result, String::kLengthOffset), length);
2432 movl(FieldOperand(result, String::kHashFieldOffset),
2433 Immediate(String::kEmptyHashField));
2434}
2435
2436
2437void MacroAssembler::AllocateAsciiString(Register result,
2438 Register length,
2439 Register scratch1,
2440 Register scratch2,
2441 Register scratch3,
2442 Label* gc_required) {
2443 // Calculate the number of bytes needed for the characters in the string while
2444 // observing object alignment.
2445 ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
2446 movl(scratch1, length);
2447 ASSERT(kCharSize == 1);
2448 addq(scratch1, Immediate(kObjectAlignmentMask));
2449 and_(scratch1, Immediate(~kObjectAlignmentMask));
2450
2451 // Allocate ascii string in new space.
2452 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2453 times_1,
2454 scratch1,
2455 result,
2456 scratch2,
2457 scratch3,
2458 gc_required,
2459 TAG_OBJECT);
2460
2461 // Set the map, length and hash field.
2462 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2463 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2464 movl(FieldOperand(result, String::kLengthOffset), length);
2465 movl(FieldOperand(result, String::kHashFieldOffset),
2466 Immediate(String::kEmptyHashField));
2467}
2468
2469
2470void MacroAssembler::AllocateConsString(Register result,
2471 Register scratch1,
2472 Register scratch2,
2473 Label* gc_required) {
2474 // Allocate cons string object in new space.
2475 AllocateInNewSpace(ConsString::kSize,
2476 result,
2477 scratch1,
2478 scratch2,
2479 gc_required,
2480 TAG_OBJECT);
2481
2482 // Set the map. The other fields are left uninitialized.
2483 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2484 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2485}
2486
2487
2488void MacroAssembler::AllocateAsciiConsString(Register result,
2489 Register scratch1,
2490 Register scratch2,
2491 Label* gc_required) {
2492 // Allocate ascii cons string object in new space.
2493 AllocateInNewSpace(ConsString::kSize,
2494 result,
2495 scratch1,
2496 scratch2,
2497 gc_required,
2498 TAG_OBJECT);
2499
2500 // Set the map. The other fields are left uninitialized.
2501 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2502 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2503}
2504
2505
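// Walks |context_chain_length| closure links up from the current context and
// loads the enclosing function context (FCONTEXT_INDEX slot) into |dst|.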
Steve Blockd0582a62009-12-15 09:54:21 +00002506void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2507 if (context_chain_length > 0) {
2508 // Move up the chain of contexts to the context containing the slot.
2509 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2510 // Load the function context (which is the incoming, outer context).
Leon Clarkee46be812010-01-19 14:06:41 +00002511 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00002512 for (int i = 1; i < context_chain_length; i++) {
2513 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2514 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2515 }
2516 // The context may be an intermediate context, not a function context.
2517 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2518 } else { // context is the current function context.
2519 // The context may be an intermediate context, not a function context.
2520 movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2521 }
2522}
2523
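// Computes how many stack slots a C call with |num_arguments| arguments needs.
// For example, a call with 8 arguments needs 8 slots on Windows (every
// argument gets a home slot) but only 2 elsewhere (the first 6 travel in
// registers).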
Leon Clarke4515c472010-02-03 11:58:03 +00002524int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2525 // On Windows stack slots are reserved by the caller for all arguments
2526 // including the ones passed in registers. On Linux 6 arguments are passed in
2527 // registers and the caller does not reserve stack slots for them.
2528 ASSERT(num_arguments >= 0);
2529#ifdef _WIN64
2530 static const int kArgumentsWithoutStackSlot = 0;
2531#else
2532 static const int kArgumentsWithoutStackSlot = 6;
2533#endif
2534 return num_arguments > kArgumentsWithoutStackSlot ?
2535 num_arguments - kArgumentsWithoutStackSlot : 0;
2536}
2537
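// Aligns rsp to the OS activation frame alignment, reserves the argument slots
// plus one extra slot, and stashes the old rsp in that extra slot so that
// CallCFunction can restore it after the call.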
2538void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2539 int frame_alignment = OS::ActivationFrameAlignment();
2540 ASSERT(frame_alignment != 0);
2541 ASSERT(num_arguments >= 0);
2542 // Make stack end at alignment and allocate space for arguments and old rsp.
2543 movq(kScratchRegister, rsp);
2544 ASSERT(IsPowerOf2(frame_alignment));
2545 int argument_slots_on_stack =
2546 ArgumentStackSlotsForCFunctionCall(num_arguments);
2547 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2548 and_(rsp, Immediate(-frame_alignment));
2549 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2550}
2551
2552
2553void MacroAssembler::CallCFunction(ExternalReference function,
2554 int num_arguments) {
2555 movq(rax, function);
2556 CallCFunction(rax, num_arguments);
2557}
2558
2559
2560void MacroAssembler::CallCFunction(Register function, int num_arguments) {
2561 call(function);
2562 ASSERT(OS::ActivationFrameAlignment() != 0);
2563 ASSERT(num_arguments >= 0);
2564 int argument_slots_on_stack =
2565 ArgumentStackSlotsForCFunctionCall(num_arguments);
2566 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2567}
2568
Steve Blockd0582a62009-12-15 09:54:21 +00002569
Steve Blocka7e24c12009-10-30 11:49:00 +00002570CodePatcher::CodePatcher(byte* address, int size)
2571 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2572 // Create a new macro assembler pointing to the address of the code to patch.
2573 // The size is adjusted with kGap in order for the assembler to generate size
2574 // bytes of instructions without failing with buffer size constraints.
2575 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2576}
2577
2578
2579CodePatcher::~CodePatcher() {
2580 // Indicate that code has changed.
2581 CPU::FlushICache(address_, size_);
2582
2583 // Check that the code was patched as expected.
2584 ASSERT(masm_.pc_ == address_ + size_);
2585 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2586}
2587
Steve Blocka7e24c12009-10-30 11:49:00 +00002588} } // namespace v8::internal