// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


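// Added note (derived from the code below): RecordWriteHelper marks the page
// region containing 'addr' as dirty so the GC will rescan it. 'object' is
// clobbered to hold the page start and 'addr' to hold the region number;
// 'scratch' is only used for the debug-mode new-space check.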
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr. See
  // Page::GetRegionNumberForAddress for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));

  // Set dirty mark for region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later. However as the new key is the numeric value of a string key
  // there is no difference in using either key.
  Integer32ToSmi(index, hash);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PushHandleScope(Register scratch) {
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  const int kExtensionsOffset = 0;
  const int kNextOffset = Offset(
      ExternalReference::handle_scope_next_address(),
      extensions_address);
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      extensions_address);

  // Push the number of extensions, smi-tagged so the gc will ignore it.
  movq(kScratchRegister, extensions_address);
  movq(scratch, Operand(kScratchRegister, kExtensionsOffset));
  movq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0));
  Integer32ToSmi(scratch, scratch);
  push(scratch);
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  push(Operand(kScratchRegister, kNextOffset));
  push(Operand(kScratchRegister, kLimitOffset));
}


Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  const int kExtensionsOffset = 0;
  const int kNextOffset = Offset(
      ExternalReference::handle_scope_next_address(),
      extensions_address);
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      extensions_address);

  Object* result = NULL;
  Label write_back;
  movq(kScratchRegister, extensions_address);
  cmpq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0));
  j(equal, &write_back);
  push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  pop(saved);
  movq(kScratchRegister, extensions_address);

  bind(&write_back);
  pop(Operand(kScratchRegister, kLimitOffset));
  pop(Operand(kScratchRegister, kNextOffset));
  pop(scratch);
  SmiToInteger32(scratch, scratch);
  movq(Operand(kScratchRegister, kExtensionsOffset), scratch);

  return result;
}


void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}


Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

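// Added note (derived from the code below): on x64 a smi keeps its 32-bit
// payload in the upper half of the word and its low bits, including the tag,
// are zero. Tagging is therefore a single shift left by kSmiShift and
// untagging a shift right, as in the helpers that follow.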
static int kSmiShift = kSmiTagSize + kSmiShiftSize;

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

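// Added note (derived from the code below): small smi constants are
// synthesized from kSmiConstantRegister (verified in the debug code below to
// hold Smi::FromInt(kSmiConstantRegisterValue)) using lea with a scale
// factor, which avoids loading a full 64-bit immediate.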
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (FLAG_debug_code) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      Label ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  if (source->value() == 0) {
    xorl(dst, dst);
    return;
  }
  int value = source->value();
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (FLAG_debug_code) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


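// Added note (derived from the code below): the positive-smi checks rotate
// the value left by one bit, which moves the sign bit down next to the smi
// tag bit, so a single test of the two low bits checks "is a smi" and
// "is non-negative" at once.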
Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Make mask 0x8000000000000001 and test that both bits are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible.
    if (dst.is(src1)) {
      addq(dst, src2);
    } else {
      movq(dst, src1);
      addq(dst, src2);
    }
    Assert(no_overflow, "Smi addition overflow");
  } else if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    cmpq(dst, src2);
    j(overflow, on_not_smi_result);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result) {
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}

void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


1205void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
1206 if (constant->value() == 0) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001207 if (!dst.is(src)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001208 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001209 }
Steve Block3ce2e202009-11-05 08:53:23 +00001210 } else if (dst.is(src)) {
1211 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001212 Register constant_reg = GetSmiConstant(constant);
1213 subq(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001214 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001215 if (constant->value() == Smi::kMinValue) {
Steve Block8defd9f2010-07-08 12:39:36 +01001216 LoadSmiConstant(dst, constant);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001217 // Adding and subtracting the min-value gives the same result, it only
1218 // differs on the overflow bit, which we don't check here.
1219 addq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001220 } else {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001221 // Subtract by adding the negation.
Steve Block8defd9f2010-07-08 12:39:36 +01001222 LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
Steve Block3ce2e202009-11-05 08:53:23 +00001223 addq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001224 }
1225 }
1226}
1227
1228
1229void MacroAssembler::SmiSubConstant(Register dst,
1230 Register src,
Steve Block3ce2e202009-11-05 08:53:23 +00001231 Smi* constant,
Steve Blocka7e24c12009-10-30 11:49:00 +00001232 Label* on_not_smi_result) {
Steve Block3ce2e202009-11-05 08:53:23 +00001233 if (constant->value() == 0) {
1234 if (!dst.is(src)) {
1235 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001236 }
Steve Block3ce2e202009-11-05 08:53:23 +00001237 } else if (dst.is(src)) {
1238 ASSERT(!dst.is(kScratchRegister));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001239 if (constant->value() == Smi::kMinValue) {
1240 // Subtracting min-value from any non-negative value will overflow.
1241 // We test the non-negativeness before doing the subtraction.
1242 testq(src, src);
1243 j(not_sign, on_not_smi_result);
Steve Block8defd9f2010-07-08 12:39:36 +01001244 LoadSmiConstant(kScratchRegister, constant);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001245 subq(dst, kScratchRegister);
1246 } else {
1247 // Subtract by adding the negation.
Steve Block8defd9f2010-07-08 12:39:36 +01001248 LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001249 addq(kScratchRegister, dst);
1250 j(overflow, on_not_smi_result);
1251 movq(dst, kScratchRegister);
1252 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001253 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001254 if (constant->value() == Smi::kMinValue) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001255 // Subtracting min-value from any non-negative value will overflow.
1256 // We test the non-negativeness before doing the subtraction.
1257 testq(src, src);
1258 j(not_sign, on_not_smi_result);
Steve Block8defd9f2010-07-08 12:39:36 +01001259 LoadSmiConstant(dst, constant);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001260 // Adding and subtracting the min-value gives the same result, it only
1261 // differs on the overflow bit, which we don't check here.
1262 addq(dst, src);
Steve Block3ce2e202009-11-05 08:53:23 +00001263 } else {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001264 // Subtract by adding the negation.
Steve Block8defd9f2010-07-08 12:39:36 +01001265 LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
Steve Block3ce2e202009-11-05 08:53:23 +00001266 addq(dst, src);
1267 j(overflow, on_not_smi_result);
1268 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001269 }
1270}
1271
1272
1273void MacroAssembler::SmiDiv(Register dst,
1274 Register src1,
1275 Register src2,
1276 Label* on_not_smi_result) {
Steve Block3ce2e202009-11-05 08:53:23 +00001277 ASSERT(!src1.is(kScratchRegister));
1278 ASSERT(!src2.is(kScratchRegister));
1279 ASSERT(!dst.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001280 ASSERT(!src2.is(rax));
1281 ASSERT(!src2.is(rdx));
1282 ASSERT(!src1.is(rdx));
1283
1284 // Check for 0 divisor (result is +/-Infinity).
1285 Label positive_divisor;
Steve Block3ce2e202009-11-05 08:53:23 +00001286 testq(src2, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00001287 j(zero, on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +00001288
Steve Block3ce2e202009-11-05 08:53:23 +00001289 if (src1.is(rax)) {
1290 movq(kScratchRegister, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001291 }
Steve Block3ce2e202009-11-05 08:53:23 +00001292 SmiToInteger32(rax, src1);
1293 // We need to rule out dividing Smi::kMinValue by -1, since that would
1294 // overflow in idiv and raise an exception.
1295 // We combine this with negative zero test (negative zero only happens
1296 // when dividing zero by a negative number).
Steve Blocka7e24c12009-10-30 11:49:00 +00001297
Steve Block3ce2e202009-11-05 08:53:23 +00001298 // We overshoot a little and go to slow case if we divide min-value
1299 // by any negative value, not just -1.
1300 Label safe_div;
1301 testl(rax, Immediate(0x7fffffff));
1302 j(not_zero, &safe_div);
1303 testq(src2, src2);
1304 if (src1.is(rax)) {
1305 j(positive, &safe_div);
1306 movq(src1, kScratchRegister);
1307 jmp(on_not_smi_result);
1308 } else {
1309 j(negative, on_not_smi_result);
1310 }
1311 bind(&safe_div);
1312
1313 SmiToInteger32(src2, src2);
1314 // Sign extend src1 into edx:eax.
1315 cdq();
Steve Blocka7e24c12009-10-30 11:49:00 +00001316 idivl(src2);
Steve Block3ce2e202009-11-05 08:53:23 +00001317 Integer32ToSmi(src2, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00001318 // Check that the remainder is zero.
1319 testl(rdx, rdx);
Steve Block3ce2e202009-11-05 08:53:23 +00001320 if (src1.is(rax)) {
1321 Label smi_result;
1322 j(zero, &smi_result);
1323 movq(src1, kScratchRegister);
1324 jmp(on_not_smi_result);
1325 bind(&smi_result);
1326 } else {
1327 j(not_zero, on_not_smi_result);
1328 }
1329 if (!dst.is(src1) && src1.is(rax)) {
1330 movq(src1, kScratchRegister);
1331 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001332 Integer32ToSmi(dst, rax);
1333}
1334
1335
1336void MacroAssembler::SmiMod(Register dst,
1337 Register src1,
1338 Register src2,
1339 Label* on_not_smi_result) {
1340 ASSERT(!dst.is(kScratchRegister));
1341 ASSERT(!src1.is(kScratchRegister));
1342 ASSERT(!src2.is(kScratchRegister));
1343 ASSERT(!src2.is(rax));
1344 ASSERT(!src2.is(rdx));
1345 ASSERT(!src1.is(rdx));
Steve Block3ce2e202009-11-05 08:53:23 +00001346 ASSERT(!src1.is(src2));
Steve Blocka7e24c12009-10-30 11:49:00 +00001347
Steve Block3ce2e202009-11-05 08:53:23 +00001348 testq(src2, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00001349 j(zero, on_not_smi_result);
1350
1351 if (src1.is(rax)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001352 movq(kScratchRegister, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001353 }
Steve Block3ce2e202009-11-05 08:53:23 +00001354 SmiToInteger32(rax, src1);
1355 SmiToInteger32(src2, src2);
1356
1357 // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
1358 Label safe_div;
1359 cmpl(rax, Immediate(Smi::kMinValue));
1360 j(not_equal, &safe_div);
1361 cmpl(src2, Immediate(-1));
1362 j(not_equal, &safe_div);
1363 // Retag inputs and go slow case.
1364 Integer32ToSmi(src2, src2);
1365 if (src1.is(rax)) {
1366 movq(src1, kScratchRegister);
1367 }
1368 jmp(on_not_smi_result);
1369 bind(&safe_div);
1370
Steve Blocka7e24c12009-10-30 11:49:00 +00001371 // Sign extend eax into edx:eax.
1372 cdq();
1373 idivl(src2);
Steve Block3ce2e202009-11-05 08:53:23 +00001374 // Restore smi tags on inputs.
1375 Integer32ToSmi(src2, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00001376 if (src1.is(rax)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001377 movq(src1, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00001378 }
Steve Block3ce2e202009-11-05 08:53:23 +00001379 // Check for a negative zero result. If the result is zero, and the
1380 // dividend is negative, go slow to return a floating point negative zero.
1381 Label smi_result;
1382 testl(rdx, rdx);
1383 j(not_zero, &smi_result);
1384 testq(src1, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001385 j(negative, on_not_smi_result);
Steve Block3ce2e202009-11-05 08:53:23 +00001386 bind(&smi_result);
1387 Integer32ToSmi(dst, rdx);
Steve Blocka7e24c12009-10-30 11:49:00 +00001388}
1389
1390
1391void MacroAssembler::SmiNot(Register dst, Register src) {
Steve Block3ce2e202009-11-05 08:53:23 +00001392 ASSERT(!dst.is(kScratchRegister));
1393 ASSERT(!src.is(kScratchRegister));
1394 // Set tag and padding bits before negating, so that they are zero afterwards.
1395 movl(kScratchRegister, Immediate(~0));
Steve Blocka7e24c12009-10-30 11:49:00 +00001396 if (dst.is(src)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001397 xor_(dst, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00001398 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001399 lea(dst, Operand(src, kScratchRegister, times_1, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00001400 }
Steve Block3ce2e202009-11-05 08:53:23 +00001401 not_(dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001402}
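// Sketch of the SmiNot trick above, assuming the usual x64 representation of
// a smi as value << kSmiShift with zero tag and padding bits: pre-setting the
// low 32 bits to ones makes the final not_() flip them back to zero, so for
// smi 5 the sequence yields (~5) << kSmiShift, which is the smi for ~5.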
1403
1404
1405void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
Steve Block3ce2e202009-11-05 08:53:23 +00001406 ASSERT(!dst.is(src2));
Steve Blocka7e24c12009-10-30 11:49:00 +00001407 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001408 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001409 }
1410 and_(dst, src2);
1411}
1412
1413
Steve Block3ce2e202009-11-05 08:53:23 +00001414void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
1415 if (constant->value() == 0) {
1416 xor_(dst, dst);
1417 } else if (dst.is(src)) {
1418 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001419 Register constant_reg = GetSmiConstant(constant);
1420 and_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001421 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001422 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001423 and_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001424 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001425}
1426
1427
1428void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1429 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001430 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001431 }
1432 or_(dst, src2);
1433}
1434
1435
Steve Block3ce2e202009-11-05 08:53:23 +00001436void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1437 if (dst.is(src)) {
1438 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001439 Register constant_reg = GetSmiConstant(constant);
1440 or_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001441 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001442 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001443 or_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001444 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001445}
1446
Steve Block3ce2e202009-11-05 08:53:23 +00001447
Steve Blocka7e24c12009-10-30 11:49:00 +00001448void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1449 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001450 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001451 }
1452 xor_(dst, src2);
1453}
1454
1455
Steve Block3ce2e202009-11-05 08:53:23 +00001456void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1457 if (dst.is(src)) {
1458 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001459 Register constant_reg = GetSmiConstant(constant);
1460 xor_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001461 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001462 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001463 xor_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001464 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001465}
1466
1467
Steve Blocka7e24c12009-10-30 11:49:00 +00001468void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
1469 Register src,
1470 int shift_value) {
Steve Block3ce2e202009-11-05 08:53:23 +00001471 ASSERT(is_uint5(shift_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001472 if (shift_value > 0) {
1473 if (dst.is(src)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001474 sar(dst, Immediate(shift_value + kSmiShift));
1475 shl(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001476 } else {
1477 UNIMPLEMENTED(); // Not used.
1478 }
1479 }
1480}
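// Example of the constant arithmetic shift above: for smi 8 and shift_value 2,
// the sar by 2 + kSmiShift drops the tag and shifts the value down (leaving 2),
// and the shl by kSmiShift retags the result as smi 2.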
1481
1482
1483void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
1484 Register src,
1485 int shift_value,
1486 Label* on_not_smi_result) {
1487 // A logical right shift interprets its result as an *unsigned* number.
1488 if (dst.is(src)) {
1489 UNIMPLEMENTED(); // Not used.
1490 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001491 movq(dst, src);
1492 if (shift_value == 0) {
1493 testq(dst, dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001494 j(negative, on_not_smi_result);
1495 }
Steve Block3ce2e202009-11-05 08:53:23 +00001496 shr(dst, Immediate(shift_value + kSmiShift));
1497 shl(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001498 }
1499}
1500
1501
1502void MacroAssembler::SmiShiftLeftConstant(Register dst,
1503 Register src,
Kristian Monsen25f61362010-05-21 11:50:48 +01001504 int shift_value) {
Steve Block3ce2e202009-11-05 08:53:23 +00001505 if (!dst.is(src)) {
1506 movq(dst, src);
1507 }
1508 if (shift_value > 0) {
1509 shl(dst, Immediate(shift_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001510 }
1511}
1512
1513
1514void MacroAssembler::SmiShiftLeft(Register dst,
1515 Register src1,
Kristian Monsen25f61362010-05-21 11:50:48 +01001516 Register src2) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001517 ASSERT(!dst.is(rcx));
1518 Label result_ok;
Steve Block3ce2e202009-11-05 08:53:23 +00001519 // Untag shift amount.
1520 if (!dst.is(src1)) {
1521 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001522 }
Steve Block3ce2e202009-11-05 08:53:23 +00001523 SmiToInteger32(rcx, src2);
1524 // The shift amount is specified by the lower 5 bits, not six as in the shl opcode.
1525 and_(rcx, Immediate(0x1f));
Steve Blockd0582a62009-12-15 09:54:21 +00001526 shl_cl(dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001527}
1528
1529
1530void MacroAssembler::SmiShiftLogicalRight(Register dst,
1531 Register src1,
1532 Register src2,
1533 Label* on_not_smi_result) {
Steve Block3ce2e202009-11-05 08:53:23 +00001534 ASSERT(!dst.is(kScratchRegister));
1535 ASSERT(!src1.is(kScratchRegister));
1536 ASSERT(!src2.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001537 ASSERT(!dst.is(rcx));
1538 Label result_ok;
Steve Block3ce2e202009-11-05 08:53:23 +00001539 if (src1.is(rcx) || src2.is(rcx)) {
1540 movq(kScratchRegister, rcx);
Steve Blocka7e24c12009-10-30 11:49:00 +00001541 }
Steve Block3ce2e202009-11-05 08:53:23 +00001542 if (!dst.is(src1)) {
1543 movq(dst, src1);
1544 }
1545 SmiToInteger32(rcx, src2);
1546 orl(rcx, Immediate(kSmiShift));
Steve Blockd0582a62009-12-15 09:54:21 +00001547 shr_cl(dst); // Shift amount is (rcx & 0x1f) + 32.
Steve Block3ce2e202009-11-05 08:53:23 +00001548 shl(dst, Immediate(kSmiShift));
1549 testq(dst, dst);
1550 if (src1.is(rcx) || src2.is(rcx)) {
1551 Label positive_result;
1552 j(positive, &positive_result);
1553 if (src1.is(rcx)) {
1554 movq(src1, kScratchRegister);
1555 } else {
1556 movq(src2, kScratchRegister);
1557 }
1558 jmp(on_not_smi_result);
1559 bind(&positive_result);
1560 } else {
1561 j(negative, on_not_smi_result); // src2 was zero and src1 negative.
1562 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001563}
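// Note on the orl with kSmiShift above: dst still holds a tagged smi with its
// value in the upper bits, so or-ing kSmiShift into the shift count makes the
// single shr shift by the requested amount plus kSmiShift, untagging and
// shifting in one instruction before the result is retagged.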
1564
1565
1566void MacroAssembler::SmiShiftArithmeticRight(Register dst,
1567 Register src1,
1568 Register src2) {
Steve Block3ce2e202009-11-05 08:53:23 +00001569 ASSERT(!dst.is(kScratchRegister));
1570 ASSERT(!src1.is(kScratchRegister));
1571 ASSERT(!src2.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001572 ASSERT(!dst.is(rcx));
Steve Block3ce2e202009-11-05 08:53:23 +00001573 if (src1.is(rcx)) {
1574 movq(kScratchRegister, src1);
1575 } else if (src2.is(rcx)) {
1576 movq(kScratchRegister, src2);
1577 }
1578 if (!dst.is(src1)) {
1579 movq(dst, src1);
1580 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001581 SmiToInteger32(rcx, src2);
Steve Block3ce2e202009-11-05 08:53:23 +00001582 orl(rcx, Immediate(kSmiShift));
Steve Blockd0582a62009-12-15 09:54:21 +00001583 sar_cl(dst); // Shift amount is (original rcx & 0x1f) + 32.
Steve Block3ce2e202009-11-05 08:53:23 +00001584 shl(dst, Immediate(kSmiShift));
1585 if (src1.is(rcx)) {
1586 movq(src1, kScratchRegister);
1587 } else if (src2.is(rcx)) {
1588 movq(src2, kScratchRegister);
1589 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001590}
1591
1592
1593void MacroAssembler::SelectNonSmi(Register dst,
1594 Register src1,
1595 Register src2,
1596 Label* on_not_smis) {
Steve Block3ce2e202009-11-05 08:53:23 +00001597 ASSERT(!dst.is(kScratchRegister));
1598 ASSERT(!src1.is(kScratchRegister));
1599 ASSERT(!src2.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001600 ASSERT(!dst.is(src1));
1601 ASSERT(!dst.is(src2));
1602 // The two operands must not both be smis.
1603#ifdef DEBUG
Steve Block3ce2e202009-11-05 08:53:23 +00001604 if (allow_stub_calls()) { // Check contains a stub call.
1605 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
1606 Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
1607 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001608#endif
1609 ASSERT_EQ(0, kSmiTag);
1610 ASSERT_EQ(0, Smi::FromInt(0));
Steve Block3ce2e202009-11-05 08:53:23 +00001611 movl(kScratchRegister, Immediate(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001612 and_(kScratchRegister, src1);
1613 testl(kScratchRegister, src2);
Steve Block3ce2e202009-11-05 08:53:23 +00001614 // If non-zero, then neither operand is a smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00001615 j(not_zero, on_not_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +00001616
Steve Block3ce2e202009-11-05 08:53:23 +00001617 // Exactly one operand is a smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00001618 ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
1619 // kScratchRegister still holds src1 & kSmiTagMask, which is either zero or one.
1620 subq(kScratchRegister, Immediate(1));
1621 // If src1 is a smi, the scratch register is all 1s, else it is all 0s.
1622 movq(dst, src1);
1623 xor_(dst, src2);
1624 and_(dst, kScratchRegister);
1625 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
1626 xor_(dst, src1);
Steve Block3ce2e202009-11-05 08:53:23 +00001627 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00001628}
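// Worked example of the selection trick above: if src1 is the smi, the scratch
// register becomes all ones after the subtraction, so
// dst = (src1 ^ src2) ^ src1 = src2; if src1 is the non-smi, the scratch
// register is zero and dst = 0 ^ src1 = src1, i.e. the non-smi is selected.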
1629
Steve Block8defd9f2010-07-08 12:39:36 +01001630
Steve Block3ce2e202009-11-05 08:53:23 +00001631SmiIndex MacroAssembler::SmiToIndex(Register dst,
1632 Register src,
1633 int shift) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001634 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001635 // There is a possible optimization if shift is in the range 60-63, but that
1636 // will (and must) never happen.
1637 if (!dst.is(src)) {
1638 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001639 }
Steve Block3ce2e202009-11-05 08:53:23 +00001640 if (shift < kSmiShift) {
1641 sar(dst, Immediate(kSmiShift - shift));
1642 } else {
1643 shl(dst, Immediate(shift - kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001644 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001645 return SmiIndex(dst, times_1);
1646}
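// Example of SmiToIndex above, assuming kSmiShift is 32 on this platform:
// converting a smi to an index scaled by 4 (shift == 2) does sar by 30,
// leaving value * 4 in the register, which the returned SmiIndex then uses
// with a times_1 scale.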
1647
Steve Blocka7e24c12009-10-30 11:49:00 +00001648SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1649 Register src,
1650 int shift) {
1651 // Register src holds a positive smi.
1652 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001653 if (!dst.is(src)) {
1654 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001655 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001656 neg(dst);
Steve Block3ce2e202009-11-05 08:53:23 +00001657 if (shift < kSmiShift) {
1658 sar(dst, Immediate(kSmiShift - shift));
1659 } else {
1660 shl(dst, Immediate(shift - kSmiShift));
1661 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001662 return SmiIndex(dst, times_1);
1663}
1664
1665
Steve Block3ce2e202009-11-05 08:53:23 +00001666void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
1667 ASSERT_EQ(0, kSmiTag);
1668 Condition smi = CheckSmi(src);
1669 j(smi, on_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00001670}
1671
Steve Block3ce2e202009-11-05 08:53:23 +00001672
1673void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
1674 Condition smi = CheckSmi(src);
1675 j(NegateCondition(smi), on_not_smi);
1676}
1677
1678
1679void MacroAssembler::JumpIfNotPositiveSmi(Register src,
1680 Label* on_not_positive_smi) {
1681 Condition positive_smi = CheckPositiveSmi(src);
1682 j(NegateCondition(positive_smi), on_not_positive_smi);
1683}
1684
1685
1686void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1687 Smi* constant,
1688 Label* on_equals) {
1689 SmiCompare(src, constant);
1690 j(equal, on_equals);
1691}
1692
1693
1694void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
1695 Condition is_valid = CheckInteger32ValidSmiValue(src);
1696 j(NegateCondition(is_valid), on_invalid);
1697}
1698
1699
1700void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1701 Label* on_invalid) {
1702 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1703 j(NegateCondition(is_valid), on_invalid);
1704}
1705
1706
1707void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
1708 Label* on_not_both_smi) {
1709 Condition both_smi = CheckBothSmi(src1, src2);
1710 j(NegateCondition(both_smi), on_not_both_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00001711}
1712
1713
Leon Clarked91b9f72010-01-27 17:25:45 +00001714void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
1715 Label* on_not_both_smi) {
1716 Condition both_smi = CheckBothPositiveSmi(src1, src2);
1717 j(NegateCondition(both_smi), on_not_both_smi);
1718}
1719
1720
Leon Clarkee46be812010-01-19 14:06:41 +00001722void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
1723 Register second_object,
1724 Register scratch1,
1725 Register scratch2,
1726 Label* on_fail) {
1727 // Check that both objects are not smis.
1728 Condition either_smi = CheckEitherSmi(first_object, second_object);
1729 j(either_smi, on_fail);
1730
1731 // Load instance type for both strings.
1732 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
1733 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
1734 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
1735 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
1736
1737 // Check that both are flat ascii strings.
1738 ASSERT(kNotStringTag != 0);
1739 const int kFlatAsciiStringMask =
1740 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
Leon Clarked91b9f72010-01-27 17:25:45 +00001741 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
Leon Clarkee46be812010-01-19 14:06:41 +00001742
1743 andl(scratch1, Immediate(kFlatAsciiStringMask));
1744 andl(scratch2, Immediate(kFlatAsciiStringMask));
1745 // Interleave the bits to check both scratch1 and scratch2 in one test.
1746 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1747 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1748 cmpl(scratch1,
Leon Clarked91b9f72010-01-27 17:25:45 +00001749 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
Leon Clarkee46be812010-01-19 14:06:41 +00001750 j(not_equal, on_fail);
1751}
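// Note on the lea above: it computes scratch1 + scratch2 * 8, and the
// preceding assert guarantees that the mask and the mask shifted left by three
// do not overlap, so the two masked instance types land in disjoint bits and a
// single cmpl checks both strings at once.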
1752
1753
Steve Block6ded16b2010-05-10 14:33:55 +01001754void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1755 Register instance_type,
1756 Register scratch,
1757 Label *failure) {
1758 if (!scratch.is(instance_type)) {
1759 movl(scratch, instance_type);
1760 }
1761
1762 const int kFlatAsciiStringMask =
1763 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1764
1765 andl(scratch, Immediate(kFlatAsciiStringMask));
1766 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1767 j(not_equal, failure);
1768}
1769
1770
1771void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1772 Register first_object_instance_type,
1773 Register second_object_instance_type,
1774 Register scratch1,
1775 Register scratch2,
1776 Label* on_fail) {
1777 // Load instance type for both strings.
1778 movq(scratch1, first_object_instance_type);
1779 movq(scratch2, second_object_instance_type);
1780
1781 // Check that both are flat ascii strings.
1782 ASSERT(kNotStringTag != 0);
1783 const int kFlatAsciiStringMask =
1784 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1785 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1786
1787 andl(scratch1, Immediate(kFlatAsciiStringMask));
1788 andl(scratch2, Immediate(kFlatAsciiStringMask));
1789 // Interleave the bits to check both scratch1 and scratch2 in one test.
1790 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1791 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1792 cmpl(scratch1,
1793 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1794 j(not_equal, on_fail);
1795}
1796
1797
Steve Blocka7e24c12009-10-30 11:49:00 +00001798void MacroAssembler::Move(Register dst, Handle<Object> source) {
1799 ASSERT(!source->IsFailure());
1800 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001801 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001802 } else {
1803 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1804 }
1805}
1806
1807
1808void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001809 ASSERT(!source->IsFailure());
Steve Blocka7e24c12009-10-30 11:49:00 +00001810 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001811 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001812 } else {
1813 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1814 movq(dst, kScratchRegister);
1815 }
1816}
1817
1818
1819void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001820 if (source->IsSmi()) {
1821 SmiCompare(dst, Smi::cast(*source));
1822 } else {
1823 Move(kScratchRegister, source);
1824 cmpq(dst, kScratchRegister);
1825 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001826}
1827
1828
1829void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1830 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001831 SmiCompare(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001832 } else {
1833 ASSERT(source->IsHeapObject());
1834 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1835 cmpq(dst, kScratchRegister);
1836 }
1837}
1838
1839
1840void MacroAssembler::Push(Handle<Object> source) {
1841 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001842 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001843 } else {
1844 ASSERT(source->IsHeapObject());
1845 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1846 push(kScratchRegister);
1847 }
1848}
1849
1850
1851void MacroAssembler::Push(Smi* source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001852 intptr_t smi = reinterpret_cast<intptr_t>(source);
1853 if (is_int32(smi)) {
1854 push(Immediate(static_cast<int32_t>(smi)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001855 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001856 Register constant = GetSmiConstant(source);
1857 push(constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001858 }
1859}
1860
1861
Leon Clarkee46be812010-01-19 14:06:41 +00001862void MacroAssembler::Drop(int stack_elements) {
1863 if (stack_elements > 0) {
1864 addq(rsp, Immediate(stack_elements * kPointerSize));
1865 }
1866}
1867
1868
Steve Block3ce2e202009-11-05 08:53:23 +00001869void MacroAssembler::Test(const Operand& src, Smi* source) {
Leon Clarkef7060e22010-06-03 12:02:55 +01001870 testl(Operand(src, kIntSize), Immediate(source->value()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001871}
1872
1873
1874void MacroAssembler::Jump(ExternalReference ext) {
1875 movq(kScratchRegister, ext);
1876 jmp(kScratchRegister);
1877}
1878
1879
1880void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1881 movq(kScratchRegister, destination, rmode);
1882 jmp(kScratchRegister);
1883}
1884
1885
1886void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
Steve Block3ce2e202009-11-05 08:53:23 +00001887 // TODO(X64): Inline this
1888 jmp(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001889}
1890
1891
1892void MacroAssembler::Call(ExternalReference ext) {
1893 movq(kScratchRegister, ext);
1894 call(kScratchRegister);
1895}
1896
1897
1898void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1899 movq(kScratchRegister, destination, rmode);
1900 call(kScratchRegister);
1901}
1902
1903
1904void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1905 ASSERT(RelocInfo::IsCodeTarget(rmode));
1906 WriteRecordedPositions();
Steve Block3ce2e202009-11-05 08:53:23 +00001907 call(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001908}
1909
1910
1911void MacroAssembler::PushTryHandler(CodeLocation try_location,
1912 HandlerType type) {
1913 // Adjust this code if not the case.
1914 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1915
1916 // The pc (return address) is already on TOS. This code pushes state,
1917 // frame pointer and current handler. Check that they are expected
1918 // next on the stack, in that order.
1919 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1920 StackHandlerConstants::kPCOffset - kPointerSize);
1921 ASSERT_EQ(StackHandlerConstants::kFPOffset,
1922 StackHandlerConstants::kStateOffset - kPointerSize);
1923 ASSERT_EQ(StackHandlerConstants::kNextOffset,
1924 StackHandlerConstants::kFPOffset - kPointerSize);
1925
1926 if (try_location == IN_JAVASCRIPT) {
1927 if (type == TRY_CATCH_HANDLER) {
1928 push(Immediate(StackHandler::TRY_CATCH));
1929 } else {
1930 push(Immediate(StackHandler::TRY_FINALLY));
1931 }
1932 push(rbp);
1933 } else {
1934 ASSERT(try_location == IN_JS_ENTRY);
1935 // The frame pointer does not point to a JS frame so we save NULL
1936 // for rbp. We expect the code throwing an exception to check rbp
1937 // before dereferencing it to restore the context.
1938 push(Immediate(StackHandler::ENTRY));
1939 push(Immediate(0)); // NULL frame pointer.
1940 }
1941 // Save the current handler.
1942 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1943 push(Operand(kScratchRegister, 0));
1944 // Link this handler.
1945 movq(Operand(kScratchRegister, 0), rsp);
1946}
1947
1948
Leon Clarkee46be812010-01-19 14:06:41 +00001949void MacroAssembler::PopTryHandler() {
1950 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1951 // Unlink this handler.
1952 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1953 pop(Operand(kScratchRegister, 0));
1954 // Remove the remaining fields.
1955 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1956}
1957
1958
Steve Blocka7e24c12009-10-30 11:49:00 +00001959void MacroAssembler::Ret() {
1960 ret(0);
1961}
1962
1963
1964void MacroAssembler::FCmp() {
Steve Block3ce2e202009-11-05 08:53:23 +00001965 fucomip();
Steve Block8defd9f2010-07-08 12:39:36 +01001966 fstp(0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001967}
1968
1969
1970void MacroAssembler::CmpObjectType(Register heap_object,
1971 InstanceType type,
1972 Register map) {
1973 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1974 CmpInstanceType(map, type);
1975}
1976
1977
1978void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1979 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1980 Immediate(static_cast<int8_t>(type)));
1981}
1982
1983
Andrei Popescu31002712010-02-23 13:46:05 +00001984void MacroAssembler::CheckMap(Register obj,
1985 Handle<Map> map,
1986 Label* fail,
1987 bool is_heap_object) {
1988 if (!is_heap_object) {
1989 JumpIfSmi(obj, fail);
1990 }
1991 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1992 j(not_equal, fail);
1993}
1994
1995
Leon Clarkef7060e22010-06-03 12:02:55 +01001996void MacroAssembler::AbortIfNotNumber(Register object) {
Andrei Popescu402d9372010-02-26 13:31:12 +00001997 Label ok;
1998 Condition is_smi = CheckSmi(object);
1999 j(is_smi, &ok);
2000 Cmp(FieldOperand(object, HeapObject::kMapOffset),
2001 Factory::heap_number_map());
Leon Clarkef7060e22010-06-03 12:02:55 +01002002 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +00002003 bind(&ok);
2004}
2005
2006
Iain Merrick75681382010-08-19 15:07:18 +01002007void MacroAssembler::AbortIfSmi(Register object) {
2009 Condition is_smi = CheckSmi(object);
2010 Assert(NegateCondition(is_smi), "Operand is a smi");
2011}
2012
2013
Leon Clarkef7060e22010-06-03 12:02:55 +01002014void MacroAssembler::AbortIfNotSmi(Register object) {
2016 Condition is_smi = CheckSmi(object);
Iain Merrick75681382010-08-19 15:07:18 +01002017 Assert(is_smi, "Operand is not a smi");
Steve Block6ded16b2010-05-10 14:33:55 +01002018}
2019
2020
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002021void MacroAssembler::AbortIfNotRootValue(Register src,
2022 Heap::RootListIndex root_value_index,
2023 const char* message) {
2024 ASSERT(!src.is(kScratchRegister));
2025 LoadRoot(kScratchRegister, root_value_index);
2026 cmpq(src, kScratchRegister);
2027 Check(equal, message);
2028}
2029
2030
Leon Clarked91b9f72010-01-27 17:25:45 +00002032Condition MacroAssembler::IsObjectStringType(Register heap_object,
2033 Register map,
2034 Register instance_type) {
2035 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00002036 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00002037 ASSERT(kNotStringTag != 0);
2038 testb(instance_type, Immediate(kIsNotStringMask));
2039 return zero;
2040}
2041
2042
Steve Blocka7e24c12009-10-30 11:49:00 +00002043void MacroAssembler::TryGetFunctionPrototype(Register function,
2044 Register result,
2045 Label* miss) {
2046 // Check that the function isn't a smi.
2047 testl(function, Immediate(kSmiTagMask));
2048 j(zero, miss);
2049
2050 // Check that the function really is a function.
2051 CmpObjectType(function, JS_FUNCTION_TYPE, result);
2052 j(not_equal, miss);
2053
2054 // Make sure that the function has an instance prototype.
2055 Label non_instance;
2056 testb(FieldOperand(result, Map::kBitFieldOffset),
2057 Immediate(1 << Map::kHasNonInstancePrototype));
2058 j(not_zero, &non_instance);
2059
2060 // Get the prototype or initial map from the function.
2061 movq(result,
2062 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2063
2064 // If the prototype or initial map is the hole, don't return it and
2065 // simply miss the cache instead. This will allow us to allocate a
2066 // prototype object on-demand in the runtime system.
2067 CompareRoot(result, Heap::kTheHoleValueRootIndex);
2068 j(equal, miss);
2069
2070 // If the function does not have an initial map, we're done.
2071 Label done;
2072 CmpObjectType(result, MAP_TYPE, kScratchRegister);
2073 j(not_equal, &done);
2074
2075 // Get the prototype from the initial map.
2076 movq(result, FieldOperand(result, Map::kPrototypeOffset));
2077 jmp(&done);
2078
2079 // Non-instance prototype: Fetch prototype from constructor field
2080 // in initial map.
2081 bind(&non_instance);
2082 movq(result, FieldOperand(result, Map::kConstructorOffset));
2083
2084 // All done.
2085 bind(&done);
2086}
2087
2088
2089void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2090 if (FLAG_native_code_counters && counter->Enabled()) {
2091 movq(kScratchRegister, ExternalReference(counter));
2092 movl(Operand(kScratchRegister, 0), Immediate(value));
2093 }
2094}
2095
2096
2097void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2098 ASSERT(value > 0);
2099 if (FLAG_native_code_counters && counter->Enabled()) {
2100 movq(kScratchRegister, ExternalReference(counter));
2101 Operand operand(kScratchRegister, 0);
2102 if (value == 1) {
2103 incl(operand);
2104 } else {
2105 addl(operand, Immediate(value));
2106 }
2107 }
2108}
2109
2110
2111void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2112 ASSERT(value > 0);
2113 if (FLAG_native_code_counters && counter->Enabled()) {
2114 movq(kScratchRegister, ExternalReference(counter));
2115 Operand operand(kScratchRegister, 0);
2116 if (value == 1) {
2117 decl(operand);
2118 } else {
2119 subl(operand, Immediate(value));
2120 }
2121 }
2122}
2123
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002124
Steve Blocka7e24c12009-10-30 11:49:00 +00002125#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +00002126void MacroAssembler::DebugBreak() {
2127 ASSERT(allow_stub_calls());
2128 xor_(rax, rax); // no arguments
2129 movq(rbx, ExternalReference(Runtime::kDebugBreak));
2130 CEntryStub ces(1);
2131 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Steve Blocka7e24c12009-10-30 11:49:00 +00002132}
Andrei Popescu402d9372010-02-26 13:31:12 +00002133#endif // ENABLE_DEBUGGER_SUPPORT
Steve Blocka7e24c12009-10-30 11:49:00 +00002134
2135
2136void MacroAssembler::InvokePrologue(const ParameterCount& expected,
2137 const ParameterCount& actual,
2138 Handle<Code> code_constant,
2139 Register code_register,
2140 Label* done,
2141 InvokeFlag flag) {
2142 bool definitely_matches = false;
2143 Label invoke;
2144 if (expected.is_immediate()) {
2145 ASSERT(actual.is_immediate());
2146 if (expected.immediate() == actual.immediate()) {
2147 definitely_matches = true;
2148 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01002149 Set(rax, actual.immediate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002150 if (expected.immediate() ==
Steve Block3ce2e202009-11-05 08:53:23 +00002151 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002152 // Don't worry about adapting arguments for built-ins that
2153 // don't want that done. Skip adaptation code by making it look
2154 // like we have a match between expected and actual number of
2155 // arguments.
2156 definitely_matches = true;
2157 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01002158 Set(rbx, expected.immediate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002159 }
2160 }
2161 } else {
2162 if (actual.is_immediate()) {
2163 // Expected is in register, actual is immediate. This is the
2164 // case when we invoke function values without going through the
2165 // IC mechanism.
2166 cmpq(expected.reg(), Immediate(actual.immediate()));
2167 j(equal, &invoke);
2168 ASSERT(expected.reg().is(rbx));
Steve Block8defd9f2010-07-08 12:39:36 +01002169 Set(rax, actual.immediate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002170 } else if (!expected.reg().is(actual.reg())) {
2171 // Both expected and actual are in (different) registers. This
2172 // is the case when we invoke functions using call and apply.
2173 cmpq(expected.reg(), actual.reg());
2174 j(equal, &invoke);
2175 ASSERT(actual.reg().is(rax));
2176 ASSERT(expected.reg().is(rbx));
2177 }
2178 }
2179
2180 if (!definitely_matches) {
2181 Handle<Code> adaptor =
2182 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
2183 if (!code_constant.is_null()) {
2184 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
2185 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2186 } else if (!code_register.is(rdx)) {
2187 movq(rdx, code_register);
2188 }
2189
2190 if (flag == CALL_FUNCTION) {
2191 Call(adaptor, RelocInfo::CODE_TARGET);
2192 jmp(done);
2193 } else {
2194 Jump(adaptor, RelocInfo::CODE_TARGET);
2195 }
2196 bind(&invoke);
2197 }
2198}
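// When the expected and actual argument counts cannot be shown to match,
// InvokePrologue above routes the call through the ArgumentsAdaptorTrampoline
// with the actual count in rax and the expected count in rbx, so the argument
// mismatch is resolved before the target code runs.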
2199
2200
2201void MacroAssembler::InvokeCode(Register code,
2202 const ParameterCount& expected,
2203 const ParameterCount& actual,
2204 InvokeFlag flag) {
2205 Label done;
2206 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
2207 if (flag == CALL_FUNCTION) {
2208 call(code);
2209 } else {
2210 ASSERT(flag == JUMP_FUNCTION);
2211 jmp(code);
2212 }
2213 bind(&done);
2214}
2215
2216
2217void MacroAssembler::InvokeCode(Handle<Code> code,
2218 const ParameterCount& expected,
2219 const ParameterCount& actual,
2220 RelocInfo::Mode rmode,
2221 InvokeFlag flag) {
2222 Label done;
2223 Register dummy = rax;
2224 InvokePrologue(expected, actual, code, dummy, &done, flag);
2225 if (flag == CALL_FUNCTION) {
2226 Call(code, rmode);
2227 } else {
2228 ASSERT(flag == JUMP_FUNCTION);
2229 Jump(code, rmode);
2230 }
2231 bind(&done);
2232}
2233
2234
2235void MacroAssembler::InvokeFunction(Register function,
2236 const ParameterCount& actual,
2237 InvokeFlag flag) {
2238 ASSERT(function.is(rdi));
2239 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2240 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
2241 movsxlq(rbx,
2242 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002243 // Advances rdx to the end of the Code object header, to the start of
2244 // the executable code.
Steve Block791712a2010-08-27 10:21:07 +01002245 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002246
2247 ParameterCount expected(rbx);
2248 InvokeCode(rdx, expected, actual, flag);
2249}
2250
2251
Andrei Popescu402d9372010-02-26 13:31:12 +00002252void MacroAssembler::InvokeFunction(JSFunction* function,
2253 const ParameterCount& actual,
2254 InvokeFlag flag) {
2255 ASSERT(function->is_compiled());
2256 // Get the function and setup the context.
2257 Move(rdi, Handle<JSFunction>(function));
2258 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2259
2260 // Invoke the cached code.
2261 Handle<Code> code(function->code());
2262 ParameterCount expected(function->shared()->formal_parameter_count());
2263 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
2264}
2265
2266
Steve Blocka7e24c12009-10-30 11:49:00 +00002267void MacroAssembler::EnterFrame(StackFrame::Type type) {
2268 push(rbp);
2269 movq(rbp, rsp);
2270 push(rsi); // Context.
Steve Block3ce2e202009-11-05 08:53:23 +00002271 Push(Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002272 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2273 push(kScratchRegister);
2274 if (FLAG_debug_code) {
2275 movq(kScratchRegister,
2276 Factory::undefined_value(),
2277 RelocInfo::EMBEDDED_OBJECT);
2278 cmpq(Operand(rsp, 0), kScratchRegister);
2279 Check(not_equal, "code object not properly patched");
2280 }
2281}
2282
2283
2284void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2285 if (FLAG_debug_code) {
Steve Block3ce2e202009-11-05 08:53:23 +00002286 Move(kScratchRegister, Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002287 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2288 Check(equal, "stack frame types must match");
2289 }
2290 movq(rsp, rbp);
2291 pop(rbp);
2292}
2293
2294
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002295void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002296 // Setup the frame structure on the stack.
2297 // All constants are relative to the frame pointer of the exit frame.
2298 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2299 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2300 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2301 push(rbp);
2302 movq(rbp, rsp);
2303
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002304 // Reserve room for entry stack pointer and push the code object.
Steve Block3ce2e202009-11-05 08:53:23 +00002305 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +00002306 push(Immediate(0)); // Saved entry sp, patched before call.
2307 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2308 push(kScratchRegister); // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +00002309
2310 // Save the frame pointer and the context in top.
2311 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2312 ExternalReference context_address(Top::k_context_address);
Ben Murdochbb769b22010-08-11 14:56:33 +01002313 if (save_rax) {
2314 movq(r14, rax); // Back up rax before we use it.
2315 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002316
2317 movq(rax, rbp);
2318 store_rax(c_entry_fp_address);
2319 movq(rax, rsi);
2320 store_rax(context_address);
Ben Murdochbb769b22010-08-11 14:56:33 +01002321}
Steve Blocka7e24c12009-10-30 11:49:00 +00002322
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002323void MacroAssembler::EnterExitFrameEpilogue(int result_size,
Ben Murdochbb769b22010-08-11 14:56:33 +01002324 int argc) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002325#ifdef _WIN64
2326 // Reserve space on stack for result and argument structures, if necessary.
2327 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
2328 // Reserve space for the Arguments object. The Windows 64-bit ABI
2329 // requires us to pass this structure as a pointer to its location on
2330 // the stack. The structure contains 2 values.
Ben Murdochbb769b22010-08-11 14:56:33 +01002331 int argument_stack_space = argc * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002332 // We also need backing space for 4 parameters, even though
2333 // we only pass one or two parameters, and they are passed in registers.
2334 int argument_mirror_space = 4 * kPointerSize;
2335 int total_stack_space =
2336 argument_mirror_space + argument_stack_space + result_stack_space;
2337 subq(rsp, Immediate(total_stack_space));
2338#endif
2339
2340 // Get the required frame alignment for the OS.
2341 static const int kFrameAlignment = OS::ActivationFrameAlignment();
2342 if (kFrameAlignment > 0) {
2343 ASSERT(IsPowerOf2(kFrameAlignment));
2344 movq(kScratchRegister, Immediate(-kFrameAlignment));
2345 and_(rsp, kScratchRegister);
2346 }
2347
2348 // Patch the saved entry sp.
2349 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2350}
2351
2352
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002353void MacroAssembler::EnterExitFrame(int result_size) {
2354 EnterExitFramePrologue(true);
Ben Murdochbb769b22010-08-11 14:56:33 +01002355
2356 // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
2357 // so it must be retained across the C-call.
2358 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
2359 lea(r12, Operand(rbp, r14, times_pointer_size, offset));
2360
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002361 EnterExitFrameEpilogue(result_size, 2);
Ben Murdochbb769b22010-08-11 14:56:33 +01002362}
2363
2364
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002365void MacroAssembler::EnterApiExitFrame(int stack_space,
Ben Murdochbb769b22010-08-11 14:56:33 +01002366 int argc,
2367 int result_size) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002368 EnterExitFramePrologue(false);
Ben Murdochbb769b22010-08-11 14:56:33 +01002369
2370 // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
2371 // so it must be retained across the C-call.
2372 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
2373 lea(r12, Operand(rbp, (stack_space * kPointerSize) + offset));
2374
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002375 EnterExitFrameEpilogue(result_size, argc);
Ben Murdochbb769b22010-08-11 14:56:33 +01002376}
2377
2378
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002379void MacroAssembler::LeaveExitFrame(int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002380 // Registers:
Steve Block8defd9f2010-07-08 12:39:36 +01002381 // r12 : argv
Steve Blocka7e24c12009-10-30 11:49:00 +00002382
2383 // Get the return address from the stack and restore the frame pointer.
2384 movq(rcx, Operand(rbp, 1 * kPointerSize));
2385 movq(rbp, Operand(rbp, 0 * kPointerSize));
2386
Steve Blocka7e24c12009-10-30 11:49:00 +00002387 // Pop everything up to and including the arguments and the receiver
2388 // from the caller stack.
Steve Block8defd9f2010-07-08 12:39:36 +01002389 lea(rsp, Operand(r12, 1 * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00002390
2391 // Restore current context from top and clear it in debug mode.
2392 ExternalReference context_address(Top::k_context_address);
2393 movq(kScratchRegister, context_address);
2394 movq(rsi, Operand(kScratchRegister, 0));
2395#ifdef DEBUG
2396 movq(Operand(kScratchRegister, 0), Immediate(0));
2397#endif
2398
2399 // Push the return address to get ready to return.
2400 push(rcx);
2401
2402 // Clear the top frame.
2403 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2404 movq(kScratchRegister, c_entry_fp_address);
2405 movq(Operand(kScratchRegister, 0), Immediate(0));
2406}
2407
2408
Steve Blocka7e24c12009-10-30 11:49:00 +00002409void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2410 Register scratch,
2411 Label* miss) {
2412 Label same_contexts;
2413
2414 ASSERT(!holder_reg.is(scratch));
2415 ASSERT(!scratch.is(kScratchRegister));
2416 // Load current lexical context from the stack frame.
2417 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2418
2419 // When generating debug code, make sure the lexical context is set.
2420 if (FLAG_debug_code) {
2421 cmpq(scratch, Immediate(0));
2422 Check(not_equal, "we should not have an empty lexical context");
2423 }
2424 // Load the global context of the current context.
2425 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2426 movq(scratch, FieldOperand(scratch, offset));
2427 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2428
2429 // Check the context is a global context.
2430 if (FLAG_debug_code) {
2431 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2432 Factory::global_context_map());
2433 Check(equal, "JSGlobalObject::global_context should be a global context.");
2434 }
2435
2436 // Check if both contexts are the same.
2437 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2438 j(equal, &same_contexts);
2439
2440 // Compare security tokens.
2441 // Check that the security token in the calling global object is
2442 // compatible with the security token in the receiving global
2443 // object.
2444
2445 // Check the context is a global context.
2446 if (FLAG_debug_code) {
2447 // Preserve original value of holder_reg.
2448 push(holder_reg);
2449 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2450 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2451 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2452
2453 // Read the first word and compare to global_context_map(),
2454 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2455 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2456 Check(equal, "JSGlobalObject::global_context should be a global context.");
2457 pop(holder_reg);
2458 }
2459
2460 movq(kScratchRegister,
2461 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00002462 int token_offset =
2463 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002464 movq(scratch, FieldOperand(scratch, token_offset));
2465 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2466 j(not_equal, miss);
2467
2468 bind(&same_contexts);
2469}
2470
2471
2472void MacroAssembler::LoadAllocationTopHelper(Register result,
2473 Register result_end,
2474 Register scratch,
2475 AllocationFlags flags) {
2476 ExternalReference new_space_allocation_top =
2477 ExternalReference::new_space_allocation_top_address();
2478
2479 // Just return if allocation top is already known.
2480 if ((flags & RESULT_CONTAINS_TOP) != 0) {
2481 // No use of scratch if allocation top is provided.
Steve Block6ded16b2010-05-10 14:33:55 +01002482 ASSERT(!scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00002483#ifdef DEBUG
2484 // Assert that result actually contains top on entry.
2485 movq(kScratchRegister, new_space_allocation_top);
2486 cmpq(result, Operand(kScratchRegister, 0));
2487 Check(equal, "Unexpected allocation top");
2488#endif
2489 return;
2490 }
2491
Steve Block6ded16b2010-05-10 14:33:55 +01002492 // Move address of new object to result. Use scratch register if available,
2493 // and keep address in scratch until call to UpdateAllocationTopHelper.
2494 if (scratch.is_valid()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002495 ASSERT(!scratch.is(result_end));
2496 movq(scratch, new_space_allocation_top);
2497 movq(result, Operand(scratch, 0));
Steve Block6ded16b2010-05-10 14:33:55 +01002498 } else if (result.is(rax)) {
2499 load_rax(new_space_allocation_top);
2500 } else {
2501 movq(kScratchRegister, new_space_allocation_top);
2502 movq(result, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002503 }
2504}
2505
2506
2507void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2508 Register scratch) {
Steve Blockd0582a62009-12-15 09:54:21 +00002509 if (FLAG_debug_code) {
2510 testq(result_end, Immediate(kObjectAlignmentMask));
2511 Check(zero, "Unaligned allocation in new space");
2512 }
2513
Steve Blocka7e24c12009-10-30 11:49:00 +00002514 ExternalReference new_space_allocation_top =
2515 ExternalReference::new_space_allocation_top_address();
2516
2517 // Update new top.
2518 if (result_end.is(rax)) {
2519 // rax can be stored directly to a memory location.
2520 store_rax(new_space_allocation_top);
2521 } else {
2522 // Register required - use scratch provided if available.
Steve Block6ded16b2010-05-10 14:33:55 +01002523 if (scratch.is_valid()) {
2524 movq(Operand(scratch, 0), result_end);
2525 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +00002526 movq(kScratchRegister, new_space_allocation_top);
2527 movq(Operand(kScratchRegister, 0), result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00002528 }
2529 }
2530}
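// The two helpers above implement bump-pointer allocation in new space:
// LoadAllocationTopHelper fetches the current allocation top, the caller
// computes the new top and compares it against the allocation limit, and
// UpdateAllocationTopHelper publishes the new top once the object fits.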
2531
2532
2533void MacroAssembler::AllocateInNewSpace(int object_size,
2534 Register result,
2535 Register result_end,
2536 Register scratch,
2537 Label* gc_required,
2538 AllocationFlags flags) {
2539 ASSERT(!result.is(result_end));
2540
2541 // Load address of new object into result.
2542 LoadAllocationTopHelper(result, result_end, scratch, flags);
2543
2544 // Calculate new top and bail out if new space is exhausted.
2545 ExternalReference new_space_allocation_limit =
2546 ExternalReference::new_space_allocation_limit_address();
Steve Block6ded16b2010-05-10 14:33:55 +01002547
2548 Register top_reg = result_end.is_valid() ? result_end : result;
2549
2550 if (top_reg.is(result)) {
2551 addq(top_reg, Immediate(object_size));
2552 } else {
2553 lea(top_reg, Operand(result, object_size));
2554 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002555 movq(kScratchRegister, new_space_allocation_limit);
Steve Block6ded16b2010-05-10 14:33:55 +01002556 cmpq(top_reg, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002557 j(above, gc_required);
2558
2559 // Update allocation top.
Steve Block6ded16b2010-05-10 14:33:55 +01002560 UpdateAllocationTopHelper(top_reg, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00002561
Steve Block6ded16b2010-05-10 14:33:55 +01002562 if (top_reg.is(result)) {
2563 if ((flags & TAG_OBJECT) != 0) {
2564 subq(result, Immediate(object_size - kHeapObjectTag));
2565 } else {
2566 subq(result, Immediate(object_size));
2567 }
2568 } else if ((flags & TAG_OBJECT) != 0) {
2569 // Tag the result if requested.
Steve Blocka7e24c12009-10-30 11:49:00 +00002570 addq(result, Immediate(kHeapObjectTag));
2571 }
2572}
2573
2574
2575void MacroAssembler::AllocateInNewSpace(int header_size,
2576 ScaleFactor element_size,
2577 Register element_count,
2578 Register result,
2579 Register result_end,
2580 Register scratch,
2581 Label* gc_required,
2582 AllocationFlags flags) {
2583 ASSERT(!result.is(result_end));
2584
2585 // Load address of new object into result.
2586 LoadAllocationTopHelper(result, result_end, scratch, flags);
2587
2588 // Calculate new top and bail out if new space is exhausted.
2589 ExternalReference new_space_allocation_limit =
2590 ExternalReference::new_space_allocation_limit_address();
2591 lea(result_end, Operand(result, element_count, element_size, header_size));
2592 movq(kScratchRegister, new_space_allocation_limit);
2593 cmpq(result_end, Operand(kScratchRegister, 0));
2594 j(above, gc_required);
2595
2596 // Update allocation top.
2597 UpdateAllocationTopHelper(result_end, scratch);
2598
2599 // Tag the result if requested.
2600 if ((flags & TAG_OBJECT) != 0) {
2601 addq(result, Immediate(kHeapObjectTag));
2602 }
2603}
2604
2605
2606void MacroAssembler::AllocateInNewSpace(Register object_size,
2607 Register result,
2608 Register result_end,
2609 Register scratch,
2610 Label* gc_required,
2611 AllocationFlags flags) {
2612 // Load address of new object into result.
2613 LoadAllocationTopHelper(result, result_end, scratch, flags);
2614
2615 // Calculate new top and bail out if new space is exhausted.
2616 ExternalReference new_space_allocation_limit =
2617 ExternalReference::new_space_allocation_limit_address();
2618 if (!object_size.is(result_end)) {
2619 movq(result_end, object_size);
2620 }
2621 addq(result_end, result);
2622 movq(kScratchRegister, new_space_allocation_limit);
2623 cmpq(result_end, Operand(kScratchRegister, 0));
2624 j(above, gc_required);
2625
2626 // Update allocation top.
2627 UpdateAllocationTopHelper(result_end, scratch);
2628
2629 // Tag the result if requested.
2630 if ((flags & TAG_OBJECT) != 0) {
2631 addq(result, Immediate(kHeapObjectTag));
2632 }
2633}
2634
2635
2636void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2637 ExternalReference new_space_allocation_top =
2638 ExternalReference::new_space_allocation_top_address();
2639
2640 // Make sure the object has no tag before resetting top.
2641 and_(object, Immediate(~kHeapObjectTagMask));
2642 movq(kScratchRegister, new_space_allocation_top);
2643#ifdef DEBUG
2644 cmpq(object, Operand(kScratchRegister, 0));
2645 Check(below, "Undo allocation of non allocated memory");
2646#endif
2647 movq(Operand(kScratchRegister, 0), object);
2648}
2649
2650
Steve Block3ce2e202009-11-05 08:53:23 +00002651void MacroAssembler::AllocateHeapNumber(Register result,
2652 Register scratch,
2653 Label* gc_required) {
2654 // Allocate heap number in new space.
2655 AllocateInNewSpace(HeapNumber::kSize,
2656 result,
2657 scratch,
2658 no_reg,
2659 gc_required,
2660 TAG_OBJECT);
2661
2662 // Set the map.
2663 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2664 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2665}
2666
2667
Leon Clarkee46be812010-01-19 14:06:41 +00002668void MacroAssembler::AllocateTwoByteString(Register result,
2669 Register length,
2670 Register scratch1,
2671 Register scratch2,
2672 Register scratch3,
2673 Label* gc_required) {
2674 // Calculate the number of bytes needed for the characters in the string while
2675 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002676 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
2677 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002678 ASSERT(kShortSize == 2);
2679 // scratch1 = length * 2 + kObjectAlignmentMask.
Steve Block6ded16b2010-05-10 14:33:55 +01002680 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
2681 kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002682 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002683 if (kHeaderAlignment > 0) {
2684 subq(scratch1, Immediate(kHeaderAlignment));
2685 }
Leon Clarkee46be812010-01-19 14:06:41 +00002686
2687 // Allocate two byte string in new space.
2688 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2689 times_1,
2690 scratch1,
2691 result,
2692 scratch2,
2693 scratch3,
2694 gc_required,
2695 TAG_OBJECT);
2696
2697 // Set the map, length and hash field.
2698 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2699 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002700 Integer32ToSmi(scratch1, length);
2701 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002702 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00002703 Immediate(String::kEmptyHashField));
2704}
2705
2706
2707void MacroAssembler::AllocateAsciiString(Register result,
2708 Register length,
2709 Register scratch1,
2710 Register scratch2,
2711 Register scratch3,
2712 Label* gc_required) {
2713 // Calculate the number of bytes needed for the characters in the string while
2714 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002715 const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
2716 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002717 movl(scratch1, length);
2718 ASSERT(kCharSize == 1);
Steve Block6ded16b2010-05-10 14:33:55 +01002719 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002720 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002721 if (kHeaderAlignment > 0) {
2722 subq(scratch1, Immediate(kHeaderAlignment));
2723 }
Leon Clarkee46be812010-01-19 14:06:41 +00002724
2725 // Allocate ascii string in new space.
2726 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2727 times_1,
2728 scratch1,
2729 result,
2730 scratch2,
2731 scratch3,
2732 gc_required,
2733 TAG_OBJECT);
2734
2735 // Set the map, length and hash field.
2736 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2737 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002738 Integer32ToSmi(scratch1, length);
2739 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002740 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00002741 Immediate(String::kEmptyHashField));
2742}
2743
2744
2745void MacroAssembler::AllocateConsString(Register result,
2746 Register scratch1,
2747 Register scratch2,
2748 Label* gc_required) {
2749 // Allocate heap number in new space.
2750 AllocateInNewSpace(ConsString::kSize,
2751 result,
2752 scratch1,
2753 scratch2,
2754 gc_required,
2755 TAG_OBJECT);
2756
2757 // Set the map. The other fields are left uninitialized.
2758 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2759 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2760}
2761
2762
2763void MacroAssembler::AllocateAsciiConsString(Register result,
2764 Register scratch1,
2765 Register scratch2,
2766 Label* gc_required) {
2767 // Allocate an ascii cons string object in new space.
2768 AllocateInNewSpace(ConsString::kSize,
2769 result,
2770 scratch1,
2771 scratch2,
2772 gc_required,
2773 TAG_OBJECT);
2774
2775 // Set the map. The other fields are left uninitialized.
2776 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2777 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2778}
2779
2780
Steve Blockd0582a62009-12-15 09:54:21 +00002781void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2782 if (context_chain_length > 0) {
2783 // Move up the chain of contexts to the context containing the slot.
2784 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2785 // Load the function context (which is the incoming, outer context).
Leon Clarkee46be812010-01-19 14:06:41 +00002786 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00002787 for (int i = 1; i < context_chain_length; i++) {
2788 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2789 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2790 }
2791 // The context may be an intermediate context, not a function context.
2792 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2793 } else { // context is the current function context.
2794 // The context may be an intermediate context, not a function context.
2795 movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2796 }
2797}
2798
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002799
Leon Clarke4515c472010-02-03 11:58:03 +00002800int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002801 // On Windows 64 stack slots are reserved by the caller for all arguments
2802 // including the ones passed in registers, and space is always allocated for
2803 // the four register arguments even if the function takes fewer than four
2804 // arguments.
2805 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2806 // and the caller does not reserve stack slots for them.
Leon Clarke4515c472010-02-03 11:58:03 +00002807 ASSERT(num_arguments >= 0);
2808#ifdef _WIN64
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002809 static const int kMinimumStackSlots = 4;
2810 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2811 return num_arguments;
Leon Clarke4515c472010-02-03 11:58:03 +00002812#else
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002813 static const int kRegisterPassedArguments = 6;
2814 if (num_arguments < kRegisterPassedArguments) return 0;
2815 return num_arguments - kRegisterPassedArguments;
Leon Clarke4515c472010-02-03 11:58:03 +00002816#endif
Leon Clarke4515c472010-02-03 11:58:03 +00002817}
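// Example: for num_arguments == 7 this returns 7 on Windows (the caller
// reserves slots for every argument, including the four register arguments),
// but only 7 - 6 == 1 on the AMD64 ABI, where the first six arguments travel
// in registers and need no stack slots.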
2818
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002819
Leon Clarke4515c472010-02-03 11:58:03 +00002820void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2821 int frame_alignment = OS::ActivationFrameAlignment();
2822 ASSERT(frame_alignment != 0);
2823 ASSERT(num_arguments >= 0);
2824 // Make stack end at alignment and allocate space for arguments and old rsp.
2825 movq(kScratchRegister, rsp);
2826 ASSERT(IsPowerOf2(frame_alignment));
2827 int argument_slots_on_stack =
2828 ArgumentStackSlotsForCFunctionCall(num_arguments);
2829 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2830 and_(rsp, Immediate(-frame_alignment));
2831 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2832}
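// Sketch of the calling sequence: PrepareCallCFunction saves the incoming rsp
// in the slot just above the reserved argument slots after aligning the stack,
// and CallCFunction reloads rsp from that same slot once the C function
// returns, undoing the alignment in one move.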
2833
2834
2835void MacroAssembler::CallCFunction(ExternalReference function,
2836 int num_arguments) {
2837 movq(rax, function);
2838 CallCFunction(rax, num_arguments);
2839}
2840
2841
2842void MacroAssembler::CallCFunction(Register function, int num_arguments) {
Steve Block6ded16b2010-05-10 14:33:55 +01002843 // Check stack alignment.
2844 if (FLAG_debug_code) {
2845 CheckStackAlignment();
2846 }
2847
Leon Clarke4515c472010-02-03 11:58:03 +00002848 call(function);
2849 ASSERT(OS::ActivationFrameAlignment() != 0);
2850 ASSERT(num_arguments >= 0);
2851 int argument_slots_on_stack =
2852 ArgumentStackSlotsForCFunctionCall(num_arguments);
2853 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2854}
2855
Steve Blockd0582a62009-12-15 09:54:21 +00002856
Steve Blocka7e24c12009-10-30 11:49:00 +00002857CodePatcher::CodePatcher(byte* address, int size)
2858 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2859 // Create a new macro assembler pointing to the address of the code to patch.
2860 // The size is adjusted with kGap in order for the assembler to generate size
2861 // bytes of instructions without failing with buffer size constraints.
2862 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2863}
2864
2865
2866CodePatcher::~CodePatcher() {
2867 // Indicate that code has changed.
2868 CPU::FlushICache(address_, size_);
2869
2870 // Check that the code was patched as expected.
2871 ASSERT(masm_.pc_ == address_ + size_);
2872 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2873}
2874
Steve Blocka7e24c12009-10-30 11:49:00 +00002875} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01002876
2877#endif // V8_TARGET_ARCH_X64