// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      has_frame_(false),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate->heap()->roots_array_start());
  intptr_t delta = other.address() - roots_register_value;
  return delta;
}


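// A worked example of the delta computed by RootRegisterDelta (addresses
// are illustrative, not real): if the roots array starts at 0x1000 and
// kRootRegisterBias is 128, kRootRegister holds 0x1080 at run time, so an
// external reference at 0x2000 has delta 0xF80.  Whenever that delta fits
// in 32 bits, the accessors below can reach the target as a single memory
// operand, Operand(kRootRegister, 0xF80), instead of first materializing
// a full 64-bit address in a scratch register.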
Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(target, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  movq(scratch, target);
  return Operand(scratch, 0);
}


void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  } else {
    movq(kScratchRegister, source);
    movq(destination, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(destination, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  } else {
    movq(kScratchRegister, destination);
    movq(Operand(kScratchRegister, 0), source);
  }
}


void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  movq(destination, source);
}


int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      // Operand is lea(scratch, Operand(kRootRegister, delta));
      // Opcodes: REX.W 8D ModRM Disp8/Disp32 - 4 or 7 bytes.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movq(destination, src);
  return 10;
}


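// All root accessors below subtract kRootRegisterBias from the scaled root
// index because kRootRegister points that many bytes past the start of the
// roots array.  The likely intent of the bias (an inference, not restated
// from a spec): biasing the base register lets more of the array fall into
// the signed 8-bit displacement range, so typical root loads encode one
// byte shorter.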
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  ASSERT(root_array_available_);
  movq(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  cmpq(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  ASSERT(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


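// A sketch of the store buffer protocol implemented below (the names are
// from this file; the layout itself is assumed): the buffer is a linear
// array of slot addresses, and the root-list entry
// Heap::kStoreBufferTopRootIndex is its fill pointer, so recording a slot
// is essentially "*top++ = addr".  The buffer is laid out so that a single
// bit of the new top, StoreBuffer::kStoreBufferOverflowBit, flags that the
// buffer is full and the overflow stub must run.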
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register addr,
                                         Register scratch,
                                         SaveFPRegsMode save_fp,
                                         RememberedSetFinalAction and_then) {
  if (FLAG_debug_code) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  LoadRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Store pointer to buffer.
  movq(Operand(scratch, 0), addr);
  // Increment buffer top.
  addq(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  StoreRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Call stub on end of buffer.
  Label done;
  // Check for end of buffer.
  testq(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}


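// Both paths below evaluate the same predicate: an address lies in the new
// space iff its high bits match the new space base, i.e.
//   (addr & mask) == new_space_start.
// The second path rewrites this as ((addr - new_space_start) & mask) == 0,
// which lets one and_ instruction apply the mask and set the flags for the
// conditional jump at the same time.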
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance distance) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address.  We load it as an external reference
    // in case the size of the new space is different between the snapshot
    // maker and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask(isolate()));
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask(isolate()));
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start(isolate()));
    cmpq(scratch, kScratchRegister);
    j(cc, branch, distance);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
    j(cc, branch, distance);
  }
}


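// A hypothetical call site, to make the contract concrete (the registers
// and field chosen here are made up): after
//   movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rax);
// the caller would emit
//   RecordWriteField(rbx, JSObject::kPropertiesOffset, rax, rcx,
//                    kDontSaveFPRegs);
// and must then treat rax and rcx as clobbered; the zapping below exists
// precisely so that code still relying on them fails loudly in debug
// builds.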
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!value.is(rsi) && !dst.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(dst, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Array access: calculate the destination address. Index is not a smi.
  Register dst = index;
  lea(dst, Operand(object, index, times_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!value.is(rsi) && !address.is(rsi));

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  if (emit_debug_code()) {
    AbortIfSmi(object);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (FLAG_debug_code) {
    Label ok;
    cmpq(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok, Label::kNear);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


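// Example of the encoding used below (the pointer value is made up): if msg
// lives at 0x7fff0001, an odd and therefore non-smi-tagged address, then
// p0 == 0x7fff0000, which does carry a valid smi tag, and the remainder
// p1 - p0 == 1 travels separately as Smi::FromInt(1) so the runtime can add
// the two back together to recover the real char*.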
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly.  Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);

  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // Control will not return here.
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key.  Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later.  However, as the new key is the numeric value of a
  // string key, there is no difference in using either key.
  Integer32ToSmi(index, hash);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(rax, function->nargs);
  LoadAddress(rbx, ExternalReference(function, isolate()));
  CEntryStub ces(1, kSaveFPRegs);
  CallStub(&ces);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#ifdef _WIN64
  // We need to prepare a slot for the result handle on the stack and put
  // a pointer to it into the first argument register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}


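// A sketch of the HandleScope bookkeeping below (the three-word layout is
// inferred from the Offset() computations, not restated from a header):
// next_address points at the isolate's handle scope data, with the "next"
// word at kNextOffset, the "limit" word at kLimitOffset and the nesting
// "level" at kLevelOffset from base_reg.  The code bumps the level around
// the C call, and if the callback grew the scope past the saved limit it
// calls delete_handle_scope_extensions to release the extra blocks.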
void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
                                              int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Factory* factory = isolate()->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax, reinterpret_cast<int64_t>(function_address),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#ifdef _WIN64
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero.  Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  bind(&empty_result);
  // It was zero; the result is undefined.
  Move(rax, factory->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
#ifdef _WIN64
  LoadAddress(rcx, ExternalReference::isolate_address());
#else
  LoadAddress(rdi, ExternalReference::isolate_address());
#endif
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}


#define REG(Name) { kRegister_ ## Name ## _Code }

static const Register saved_regs[] = {
  REG(rax), REG(rcx), REG(rdx), REG(rbx), REG(rbp), REG(rsi), REG(rdi),
  REG(r8), REG(r9), REG(r10), REG(r11)
};

#undef REG

static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);


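// Hypothetical usage (the excluded register is illustrative): a stub that
// calls into C but wants to keep its result live in rax would bracket the
// call with
//   PushCallerSaved(kSaveFPRegs, rax);
//   ...  // call C code
//   PopCallerSaved(kSaveFPRegs, rax);
// Excluded registers are simply skipped by both loops, so the push and the
// pop must name the same exclusions or the stack ends up unbalanced.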
void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1,
                                     Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      push(reg);
    }
  }
  // r12 to r15 are callee-save on all platforms.
  if (fp_mode == kSaveFPRegs) {
    CpuFeatures::Scope scope(SSE2);
    subq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(rsp, i * kDoubleSize), reg);
    }
  }
}


void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
                                    Register exclusion1,
                                    Register exclusion2,
                                    Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    CpuFeatures::Scope scope(SSE2);
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(rsp, i * kDoubleSize));
    }
    addq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
  }
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pop(reg);
    }
  }
}


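// Set() picks the shortest x64 encoding for the constant.  Rough sizes for
// the forms used below (approximate, from the usual encodings, ignoring REX
// prefixes for low registers): xorl is 2 bytes, movl imm32 is 5, the
// sign-extended movq imm32 is 7, and the full movq imm64 is 10.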
void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    Set(kScratchRegister, x);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

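// A worked example of the representation this section manipulates, assuming
// the x64 scheme where kSmiShift == 32 and kSmiTag == 0: the integer 5 is
// stored as the 64-bit word 0x0000000500000000.  The payload occupies the
// upper 32 bits and the low bits, including tag bit 0, are all zero.  That
// is why tagging is a plain shl below, and why testing the low byte is
// enough to tell smis (tag 0) from heap object pointers (tag 1).
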
Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

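// LoadSmiConstant relies on kSmiConstantRegister always holding
// Smi::FromInt(1); the debug check at the top verifies that.  Small
// constants are then synthesized with lea address arithmetic instead of a
// 10-byte movq of the raw 64-bit pattern.  For example, the case for 5
// emits, roughly,
//   lea dst, [kSmiConstantRegister + kSmiConstantRegister * 4]
// which computes Smi(1) + Smi(1) * 4 == Smi(5), since smi words scale and
// add linearly.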
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      Label ok;
      j(equal, &ok, Label::kNear);
      int3();
      bind(&ok);
    }
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}


void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok, Label::kNear);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  if (emit_debug_code()) {
    AbortIfNotSmi(smi1);
    AbortIfNotSmi(smi2);
  }
  cmpq(smi1, smi2);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  Cmp(dst, src);
}


void MacroAssembler::Cmp(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  ASSERT(!dst.AddressUsesRegister(smi_reg));
  cmpq(dst, smi_reg);
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


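// An example of the shift arithmetic below, assuming kSmiShift == 32: the
// smi 5 is the 64-bit word 5 << 32, so for power == 3 the single
// sar(dst, Immediate(32 - 3)) yields (5 << 32) >> 29 == 40 == 5 * 2^3.
// Untagging and the multiplication by a power of two collapse into one
// shift instruction.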
void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label* on_not_smis,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
    ASSERT(!src1.is(kScratchRegister));
    ASSERT(!src2.is(kScratchRegister));
    movq(kScratchRegister, src1);
    or_(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    or_(dst, src2);
    JumpIfNotSmi(dst, on_not_smis, near_jump);
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckSmi(const Operand& src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


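// How the rotate trick below works: a non-negative smi has both bit 63
// (the sign) and bit 0 (the tag) clear.  rol by 1 moves the sign bit into
// bit 0 and the tag bit into bit 1, so one testb against 0x3 checks both
// at once.  For example Smi::FromInt(-1), the word 0xFFFFFFFF00000000,
// rotates to 0xFFFFFFFE00000001; bit 0 is set, so the test fails as it
// should.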
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}


void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            Label* on_invalid,
                                            Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid,
                                                Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfSmi(Register src,
                               Label* on_smi,
                               Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(smi, on_smi, near_jump);
}


void MacroAssembler::JumpIfNotSmi(Register src,
                                  Label* on_not_smi,
                                  Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi, near_jump);
}


void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, Label* on_not_smi_or_negative,
    Label::Distance near_jump) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Register src2,
                                      Label* on_not_both_smi,
                                      Label::Distance near_jump) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Register src2,
                                                  Label* on_not_both_smi,
                                                  Label::Distance near_jump) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


1322void MacroAssembler::SmiTryAddConstant(Register dst,
1323 Register src,
1324 Smi* constant,
1325 Label* on_not_smi_result,
1326 Label::Distance near_jump) {
1327 // Does not assume that src is a smi.
1328 ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001329 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch257744e2011-11-30 15:57:28 +00001330 ASSERT(!dst.is(kScratchRegister));
1331 ASSERT(!src.is(kScratchRegister));
1332
1333 JumpIfNotSmi(src, on_not_smi_result, near_jump);
1334 Register tmp = (dst.is(src) ? kScratchRegister : dst);
1335 LoadSmiConstant(tmp, constant);
1336 addq(tmp, src);
1337 j(overflow, on_not_smi_result, near_jump);
1338 if (dst.is(src)) {
1339 movq(dst, tmp);
1340 }
1341}
1342
1343
Steve Block3ce2e202009-11-05 08:53:23 +00001344void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
1345 if (constant->value() == 0) {
1346 if (!dst.is(src)) {
1347 movq(dst, src);
1348 }
Steve Block8defd9f2010-07-08 12:39:36 +01001349 return;
Steve Block3ce2e202009-11-05 08:53:23 +00001350 } else if (dst.is(src)) {
1351 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001352 switch (constant->value()) {
1353 case 1:
1354 addq(dst, kSmiConstantRegister);
1355 return;
1356 case 2:
1357 lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1358 return;
1359 case 4:
1360 lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1361 return;
1362 case 8:
1363 lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1364 return;
1365 default:
1366 Register constant_reg = GetSmiConstant(constant);
1367 addq(dst, constant_reg);
1368 return;
1369 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001370 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001371 switch (constant->value()) {
1372 case 1:
1373 lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
1374 return;
1375 case 2:
1376 lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1377 return;
1378 case 4:
1379 lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1380 return;
1381 case 8:
1382 lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1383 return;
1384 default:
1385 LoadSmiConstant(dst, constant);
1386 addq(dst, src);
1387 return;
1388 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001389 }
1390}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}
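// Sketch of why the single addl above suffices, assuming kSmiShift == 32
// as in this file's smi encoding: the payload lives in the high 32 bits
// of the word, so on little-endian x64 the 4-byte field at byte offset
// kSmiShift / kBitsPerByte (== 4) is exactly the payload. For instance:
//   [dst] = 0x0000000500000000      // Smi::FromInt(5)
//   addl [dst + 4], Immediate(2)    // [dst] = 0x0000000700000000, smi 7
// Payload overflow is not detected here; callers must know it cannot
// happen.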


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value give the same result; they
      // differ only in the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}
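// Worked detail for the kMinValue branch above (illustrative): the tagged
// constant for Smi::kMinValue is 0x8000000000000000, which is its own
// two's-complement negation, so "subtract by adding the negation" is not
// available. Adding it instead produces the same result bits as
// subtracting it; the two differ only in the overflow flag, which this
// unchecked variant ignores.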


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test for non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result, near_jump);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result, near_jump);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test for non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result, near_jump);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value give the same result; they
      // differ only in the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result, near_jump);
    }
  }
}


void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            Label* on_smi_result,
                            Label::Distance near_jump) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result, near_jump);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result, near_jump);
  }
}
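// Illustrative trace of the check above: neg leaves exactly two tagged
// inputs unchanged, Smi::FromInt(0) (whose negation, -0, is not a smi)
// and Smi::FromInt(Smi::kMinValue) (whose negation overflows back to
// itself). Comparing the negated value with the saved original therefore
// jumps to on_smi_result precisely when negation produced a valid smi,
// e.g. Smi::FromInt(5) -> Smi::FromInt(-5).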


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    ASSERT(!src2.AddressUsesRegister(dst));
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
      movq(kScratchRegister, src1);
      addq(kScratchRegister, src2);
      Check(no_overflow, "Smi addition overflow");
    }
    lea(dst, Operand(src1, src2, times_1, 0));
  } else {
    addq(dst, src2);
    Assert(no_overflow, "Smi addition overflow");
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    cmpq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result, near_jump);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure, Label::kNear);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result, Label::kNear);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    // Result was positive zero.
    j(positive, &zero_correct_result, Label::kNear);

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result, Label::kNear);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);
  }
}
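// Note on the zero-product path above (illustrative): a zero product is a
// valid smi result only when it is +0. Since dst is zero at that point,
// the sign of the true result is the XOR of the operand signs; for
// 0 * -3 the xor_ is negative, so control falls through to the slow path,
// where the runtime can materialize the heap number -0.0.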


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  testq(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with the negative-zero test (negative zero only
  // happens when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div, Label::kNear);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div, Label::kNear);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
  } else {
    j(negative, on_not_smi_result, near_jump);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result, Label::kNear);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result, near_jump);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}
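// Illustrative summary of the guards above, with 32-bit untagged values:
// testl(rax, Immediate(0x7fffffff)) is zero only for rax == 0 and
// rax == 0x80000000 (Smi::kMinValue), the two dividends that can fail:
// 0 / negative yields -0 (not a smi) and kMinValue / -1 overflows idivl
// and would raise #DE. Routing both to the slow path whenever the divisor
// is negative overshoots slightly (kMinValue / -2 also goes slow) but is
// always safe.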


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div, Label::kNear);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div, Label::kNear);
  // Retag inputs and go to the slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result, near_jump);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result, Label::kNear);
  testq(src1, src1);
  j(negative, on_not_smi_result, near_jump);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}
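// Illustrative example of the negative-zero check above: idivl leaves the
// remainder with the sign of the dividend, so -5 % 5 produces rdx == 0
// with src1 negative and must go slow (the JavaScript result is -0, which
// a smi cannot encode), while 5 % -5 produces rdx == 0 with src1 positive
// and returns the smi +0 on the fast path.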


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}
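// Sketch of the bit trick above, assuming kSmiShift == 32: a valid smi
// has an all-zero low half, so filling it with ones first (the xor_ and
// the lea-based add are equivalent and carry-free here) lets the final
// not_ flip only the payload while restoring a zero tag half:
//   src           = vvvvvvvv00000000   (hex nibbles, payload v)
//   after xor/lea = vvvvvvvvffffffff
//   after not_    = VVVVVVVV00000000   (V == ~v), i.e. the smi for ~value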


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    Set(dst, 0);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result, near_jump);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // The shift amount is specified by the lower 5 bits, not six as the shl
  // opcode would use.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  // dst and src1 can be the same, because the one case that bails out
  // is a shift by 0, which leaves dst, and therefore src1, unchanged.
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift amount is (rcx & 0x1f) + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result, Label::kNear);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result, near_jump);
    bind(&positive_result);
  } else {
    // src2 was zero and src1 negative.
    j(negative, on_not_smi_result, near_jump);
  }
}
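// Note on the shift encoding above (illustrative): rcx holds an untagged
// count c in 0..31, so orl(rcx, Immediate(kSmiShift)) turns it into
// c + 32. A single shr_cl then untags and shifts in one step, and the shl
// re-tags the result. The only value that can no longer be represented is
// a shift by 0 of a negative smi, whose sign bit survives; the testq
// catches exactly that case, since a logical shift result is unsigned.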


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift amount is 32 + (original rcx & 0x1f).
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // At most one of the operands may be a smi.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, then neither operand is a smi.
  j(not_zero, on_not_smis, near_jump);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTagMask, which is either zero
  // or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, the scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
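// Worked example of the mask arithmetic above (illustrative), with
// kSmiTagMask == 1 and a smi tag of 0: suppose src1 is the smi.
//   kScratchRegister = (src1 & 1) - 1 = 0 - 1 = all ones
//   dst = (src1 ^ src2) & all-ones = src1 ^ src2
//   dst ^= src1                     -> dst == src2, the non-smi
// If src2 is the smi instead, the mask is all zeros, the and_ clears dst,
// and the final xor_ leaves dst == src1.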


SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}
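// Illustrative example, assuming kSmiShift == 32: indexing an array of
// 8-byte elements uses shift == 3, so the tagged value is shifted right
// by kSmiShift - 3 == 29, leaving the payload pre-scaled by 8:
//   Smi::FromInt(5) == 5 << 32;  (5 << 32) >> 29 == 40 == 5 * 8,
// which the returned SmiIndex then applies with times_1 scaling.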


SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
  ASSERT_EQ(0, kSmiShift % kBitsPerByte);
  addl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     Label* not_string,
                                     Label::Distance near_jump) {
  Condition is_smi = CheckSmi(object);
  j(is_smi, not_string, near_jump);
  CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
  j(above_equal, not_string, near_jump);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(
    Register first_object,
    Register second_object,
    Register scratch1,
    Register scratch2,
    Label* on_fail,
    Label::Distance near_jump) {
  // Check that neither object is a smi.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
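// Note on the interleaving above (illustrative): kFlatAsciiStringMask
// fits in eight bits and does not intersect itself shifted left by three
// (the ASSERT_EQ), so lea(scratch1, Operand(scratch1, scratch2, times_8,
// 0)) packs both masked instance types into one word as
// scratch1 + (scratch2 << 3), and one cmpl against
// kFlatAsciiStringTag + (kFlatAsciiStringTag << 3) tests both strings.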


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure, near_jump);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    Label* on_fail,
    Label::Distance near_jump) {
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ASCII strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(result, Operand(result, 0));
  } else {
    Move(result, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    movq(kScratchRegister, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(kScratchRegister, Operand(kScratchRegister, 0));
    push(kScratchRegister);
  } else {
    Push(object);
  }
}


void MacroAssembler::LoadGlobalCell(Register dst,
                                    Handle<JSGlobalPropertyCell> cell) {
  if (dst.is(rax)) {
    load_rax(cell.location(), RelocInfo::GLOBAL_PROPERTY_CELL);
  } else {
    movq(dst, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(dst, Operand(dst, 0));
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::TestBit(const Operand& src, int bits) {
  int byte_offset = bits / kBitsPerByte;
  int bit_in_byte = bits & (kBitsPerByte - 1);
  testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
}
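// Illustrative example: TestBit(FieldOperand(obj, off), 11) computes
// byte_offset == 1 and bit_in_byte == 3, so it emits a testb of the byte
// at offset 1 within the field against Immediate(1 << 3), touching only
// the single byte that contains the requested bit.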


void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}


int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
  const int kCallInstructionSize = 3;
  return LoadAddressSize(ext) + kCallInstructionSize;
}


void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(destination, rmode);
#endif
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(pc_offset(), end_position);
#endif
}


void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          unsigned ast_id) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode, ast_id);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Pushad() {
  push(rax);
  push(rcx);
  push(rdx);
  push(rbx);
  // Not pushing rsp or rbp.
  push(rsi);
  push(rdi);
  push(r8);
  push(r9);
  // r10 is kScratchRegister.
  push(r11);
  // r12 is kSmiConstantRegister.
  // r13 is kRootRegister.
  push(r14);
  push(r15);
  STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, -sp_delta));
}


void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, sp_delta));
  pop(r15);
  pop(r14);
  pop(r11);
  pop(r9);
  pop(r8);
  pop(rdi);
  pop(rsi);
  pop(rbx);
  pop(rdx);
  pop(rcx);
  pop(rax);
}


void MacroAssembler::Dropad() {
  addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}

// Order in which general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
const int
MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,
    1,
    2,
    3,
    -1,
    -1,
    4,
    5,
    6,
    7,
    -1,
    8,
    -1,
    -1,
    9,
    10
};
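// Reading the table above (illustrative): entry i maps the register with
// code i to its slot in the Pushad layout, with -1 for registers that are
// never pushed (rsp, rbp, kScratchRegister, kSmiConstantRegister and
// kRootRegister). For example rdx has code 2 and is the third register
// pushed, so its entry is 2.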


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movq(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movq(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // rbp. We expect the code throwing an exception to check rbp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    Push(Smi::FromInt(0));  // No context.
  } else {
    push(rbp);
    push(rsi);
  }

  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(ExternalOperand(handler_address));
  // Set this new handler as the current one.
  movq(ExternalOperand(handler_address), rsp);
}
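// Resulting handler layout (illustrative), matching the offsets asserted
// above, from rsp upwards after the final movq:
//   rsp + 0 * kPointerSize : next handler        (kNextOffset)
//   rsp + 1 * kPointerSize : code object         (kCodeOffset)
//   rsp + 2 * kPointerSize : state word, i.e.
//                            IndexField | KindField  (kStateOffset)
//   rsp + 3 * kPointerSize : context             (kContextOffset)
//   rsp + 4 * kPointerSize : frame pointer       (kFPOffset)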


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(ExternalOperand(handler_address));
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // rax = exception, rdi = code object, rdx = state.
  movq(rbx, FieldOperand(rdi, Code::kHandlerTableOffset));
  shr(rdx, Immediate(StackHandler::kKindWidth));
  movq(rdx, FieldOperand(rbx, rdx, times_8, FixedArray::kHeaderSize));
  SmiToInteger64(rdx, rdx);
  lea(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
  jmp(rdi);
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in rax.
  if (!value.is(rax)) {
    movq(rax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  movq(rsp, ExternalOperand(handler_address));
  // Restore the next handler.
  pop(ExternalOperand(handler_address));

  // Remove the code object and state, compute the handler address in rdi.
  pop(rdi);  // Code object.
  pop(rdx);  // Offset and state.

  // Restore the context and frame pointer.
  pop(rsi);  // Context.
  pop(rbp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either
  // rbp or rsi.
  Label skip;
  testq(rsi, rsi);
  j(zero, &skip, Label::kNear);
  movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  bind(&skip);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in rax.
  if (!value.is(rax)) {
    movq(rax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Load(rsp, handler_address);

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  movq(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  testl(Operand(rsp, StackHandlerConstants::kStateOffset),
        Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(ExternalOperand(handler_address));

  // Remove the code object and state, compute the handler address in rdi.
  pop(rdi);  // Code object.
  pop(rdx);  // Offset and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(rsi);
  pop(rbp);

  JumpToHandlerEntry();
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    addq(rsp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  STATIC_ASSERT(FAST_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastElementValue));
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  STATIC_ASSERT(FAST_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastSmiOnlyElementValue));
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastElementValue));
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiOnlyElements(Register map,
                                              Label* fail,
                                              Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastSmiOnlyElementValue));
  j(above, fail, distance);
}


void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register index,
    XMMRegister xmm_scratch,
    Label* fail) {
  Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done;

  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmpl(FieldOperand(maybe_number, offset),
       Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
        xmm_scratch);
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmpl(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  // Convert all NaNs to the same canonical NaN value when they are stored in
  // the double array.
  Set(kScratchRegister, BitCast<uint64_t>(
      FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
  movq(xmm_scratch, kScratchRegister);
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert it to a double and store.
  // Preserve the original value.
  SmiToInteger32(kScratchRegister, maybe_number);
  cvtlsi2sd(xmm_scratch, kScratchRegister);
  movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
        xmm_scratch);
  bind(&done);
}
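// Note on the canonicalization above (illustrative): an IEEE 754 NaN has
// an all-ones exponent and a non-zero fraction, so the first cmpl filters
// values whose upper 32 bits reach the NaN/Infinity range and the second
// distinguishes an Infinity (zero fraction) from a genuine NaN. Every NaN
// is rewritten to one canonical bit pattern before the store, so a value
// read back from the FixedDoubleArray can never be mistaken for the
// array's hole marker, itself a reserved NaN pattern.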
2730
2731
2732void MacroAssembler::CompareMap(Register obj,
2733 Handle<Map> map,
2734 Label* early_success,
2735 CompareMapMode mode) {
2736 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
2737 if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
2738 Map* transitioned_fast_element_map(
2739 map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
2740 ASSERT(transitioned_fast_element_map == NULL ||
2741 map->elements_kind() != FAST_ELEMENTS);
2742 if (transitioned_fast_element_map != NULL) {
2743 j(equal, early_success, Label::kNear);
2744 Cmp(FieldOperand(obj, HeapObject::kMapOffset),
2745 Handle<Map>(transitioned_fast_element_map));
2746 }
2747
2748 Map* transitioned_double_map(
2749 map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
2750 ASSERT(transitioned_double_map == NULL ||
2751 map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
2752 if (transitioned_double_map != NULL) {
2753 j(equal, early_success, Label::kNear);
2754 Cmp(FieldOperand(obj, HeapObject::kMapOffset),
2755 Handle<Map>(transitioned_double_map));
2756 }
2757 }
2758}
2759
2760
Andrei Popescu31002712010-02-23 13:46:05 +00002761void MacroAssembler::CheckMap(Register obj,
2762 Handle<Map> map,
2763 Label* fail,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002764 SmiCheckType smi_check_type,
2765 CompareMapMode mode) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002766 if (smi_check_type == DO_SMI_CHECK) {
Andrei Popescu31002712010-02-23 13:46:05 +00002767 JumpIfSmi(obj, fail);
2768 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002769
2770 Label success;
2771 CompareMap(obj, map, &success, mode);
Andrei Popescu31002712010-02-23 13:46:05 +00002772 j(not_equal, fail);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002773 bind(&success);
Andrei Popescu31002712010-02-23 13:46:05 +00002774}
2775
2776
Ben Murdoch257744e2011-11-30 15:57:28 +00002777void MacroAssembler::ClampUint8(Register reg) {
2778 Label done;
2779 testl(reg, Immediate(0xFFFFFF00));
2780 j(zero, &done, Label::kNear);
2781 setcc(negative, reg); // 1 if negative, 0 if positive.
2782 decb(reg); // 0 if negative, 255 if positive.
2783 bind(&done);
2784}
2785
2786
2787void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
2788 XMMRegister temp_xmm_reg,
2789 Register result_reg,
2790 Register temp_reg) {
2791 Label done;
2792 Set(result_reg, 0);
2793 xorps(temp_xmm_reg, temp_xmm_reg);
2794 ucomisd(input_reg, temp_xmm_reg);
2795 j(below, &done, Label::kNear);
2796 uint64_t one_half = BitCast<uint64_t, double>(0.5);
2797 Set(temp_reg, one_half);
2798 movq(temp_xmm_reg, temp_reg);
2799 addsd(temp_xmm_reg, input_reg);
2800 cvttsd2si(result_reg, temp_xmm_reg);
2801 testl(result_reg, Immediate(0xFFFFFF00));
2802 j(zero, &done, Label::kNear);
2803 Set(result_reg, 255);
2804 bind(&done);
2805}
2806
2807
2808void MacroAssembler::LoadInstanceDescriptors(Register map,
2809 Register descriptors) {
2810 movq(descriptors, FieldOperand(map,
2811 Map::kInstanceDescriptorsOrBitField3Offset));
2812 Label not_smi;
2813 JumpIfNotSmi(descriptors, &not_smi, Label::kNear);
2814 Move(descriptors, isolate()->factory()->empty_descriptor_array());
2815 bind(&not_smi);
2816}
2817
2818
2819void MacroAssembler::DispatchMap(Register obj,
2820 Handle<Map> map,
2821 Handle<Code> success,
2822 SmiCheckType smi_check_type) {
2823 Label fail;
2824 if (smi_check_type == DO_SMI_CHECK) {
2825 JumpIfSmi(obj, &fail);
2826 }
2827 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
2828 j(equal, success, RelocInfo::CODE_TARGET);
2829
2830 bind(&fail);
2831}
2832
2833
Leon Clarkef7060e22010-06-03 12:02:55 +01002834void MacroAssembler::AbortIfNotNumber(Register object) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002835 Label ok;
Andrei Popescu402d9372010-02-26 13:31:12 +00002836 Condition is_smi = CheckSmi(object);
Ben Murdoch257744e2011-11-30 15:57:28 +00002837 j(is_smi, &ok, Label::kNear);
Andrei Popescu402d9372010-02-26 13:31:12 +00002838 Cmp(FieldOperand(object, HeapObject::kMapOffset),
Ben Murdoch257744e2011-11-30 15:57:28 +00002839 isolate()->factory()->heap_number_map());
Leon Clarkef7060e22010-06-03 12:02:55 +01002840 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +00002841 bind(&ok);
2842}
2843
2844
Iain Merrick75681382010-08-19 15:07:18 +01002845void MacroAssembler::AbortIfSmi(Register object) {
Iain Merrick75681382010-08-19 15:07:18 +01002846 Condition is_smi = CheckSmi(object);
2847 Assert(NegateCondition(is_smi), "Operand is a smi");
2848}
2849
2850
Leon Clarkef7060e22010-06-03 12:02:55 +01002851void MacroAssembler::AbortIfNotSmi(Register object) {
Steve Block44f0eee2011-05-26 01:26:41 +01002852 Condition is_smi = CheckSmi(object);
2853 Assert(is_smi, "Operand is not a smi");
2854}
2855
2856
2857void MacroAssembler::AbortIfNotSmi(const Operand& object) {
Steve Block6ded16b2010-05-10 14:33:55 +01002858 Condition is_smi = CheckSmi(object);
Iain Merrick75681382010-08-19 15:07:18 +01002859 Assert(is_smi, "Operand is not a smi");
Steve Block6ded16b2010-05-10 14:33:55 +01002860}
2861
2862
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002863void MacroAssembler::AbortIfNotZeroExtended(Register int32_register) {
2864 ASSERT(!int32_register.is(kScratchRegister));
2865  movq(kScratchRegister, 0x100000000L, RelocInfo::NONE);
2866 cmpq(kScratchRegister, int32_register);
2867 Assert(above_equal, "32 bit value in register is not zero-extended");
2868}
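
// In effect (a sketch): the register must hold a value whose upper 32 bits
// are clear, as movl and friends guarantee on x64.
//
//   assert((reg_value >> 32) == 0);  // upper half must be zero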
2869
2870
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002871void MacroAssembler::AbortIfNotString(Register object) {
2872 testb(object, Immediate(kSmiTagMask));
2873 Assert(not_equal, "Operand is not a string");
2874 push(object);
2875 movq(object, FieldOperand(object, HeapObject::kMapOffset));
2876 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
2877 pop(object);
2878 Assert(below, "Operand is not a string");
2879}
2880
2881
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002882void MacroAssembler::AbortIfNotRootValue(Register src,
2883 Heap::RootListIndex root_value_index,
2884 const char* message) {
2885 ASSERT(!src.is(kScratchRegister));
2886 LoadRoot(kScratchRegister, root_value_index);
2887 cmpq(src, kScratchRegister);
2888 Check(equal, message);
2889}
2890
2891
Leon Clarked91b9f72010-01-27 17:25:45 +00002893Condition MacroAssembler::IsObjectStringType(Register heap_object,
2894 Register map,
2895 Register instance_type) {
2896 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00002897 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002898 STATIC_ASSERT(kNotStringTag != 0);
Leon Clarked91b9f72010-01-27 17:25:45 +00002899 testb(instance_type, Immediate(kIsNotStringMask));
2900 return zero;
2901}
2902
2903
Steve Blocka7e24c12009-10-30 11:49:00 +00002904void MacroAssembler::TryGetFunctionPrototype(Register function,
2905 Register result,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002906 Label* miss,
2907 bool miss_on_bound_function) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002908 // Check that the receiver isn't a smi.
2909 testl(function, Immediate(kSmiTagMask));
2910 j(zero, miss);
2911
2912 // Check that the function really is a function.
2913 CmpObjectType(function, JS_FUNCTION_TYPE, result);
2914 j(not_equal, miss);
2915
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002916 if (miss_on_bound_function) {
2917 movq(kScratchRegister,
2918 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2919 // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte
2920 // field).
2921 TestBit(FieldOperand(kScratchRegister,
2922 SharedFunctionInfo::kCompilerHintsOffset),
2923 SharedFunctionInfo::kBoundFunction);
2924 j(not_zero, miss);
2925 }
2926
Steve Blocka7e24c12009-10-30 11:49:00 +00002927 // Make sure that the function has an instance prototype.
Ben Murdoch257744e2011-11-30 15:57:28 +00002928 Label non_instance;
Steve Blocka7e24c12009-10-30 11:49:00 +00002929 testb(FieldOperand(result, Map::kBitFieldOffset),
2930 Immediate(1 << Map::kHasNonInstancePrototype));
Ben Murdoch257744e2011-11-30 15:57:28 +00002931 j(not_zero, &non_instance, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002932
2933 // Get the prototype or initial map from the function.
2934 movq(result,
2935 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2936
2937 // If the prototype or initial map is the hole, don't return it and
2938 // simply miss the cache instead. This will allow us to allocate a
2939 // prototype object on-demand in the runtime system.
2940 CompareRoot(result, Heap::kTheHoleValueRootIndex);
2941 j(equal, miss);
2942
2943 // If the function does not have an initial map, we're done.
Ben Murdoch257744e2011-11-30 15:57:28 +00002944 Label done;
Steve Blocka7e24c12009-10-30 11:49:00 +00002945 CmpObjectType(result, MAP_TYPE, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002946 j(not_equal, &done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002947
2948 // Get the prototype from the initial map.
2949 movq(result, FieldOperand(result, Map::kPrototypeOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002950 jmp(&done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002951
2952 // Non-instance prototype: Fetch prototype from constructor field
2953 // in initial map.
2954 bind(&non_instance);
2955 movq(result, FieldOperand(result, Map::kConstructorOffset));
2956
2957 // All done.
2958 bind(&done);
2959}
2960
2961
2962void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2963 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002964 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Ben Murdoch8b112d22011-06-08 16:22:53 +01002965 movl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002966 }
2967}
2968
2969
2970void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2971 ASSERT(value > 0);
2972 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002973 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00002974 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01002975 incl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002976 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002977 addl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002978 }
2979 }
2980}
2981
2982
2983void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2984 ASSERT(value > 0);
2985 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002986 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00002987 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01002988 decl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002989 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002990 subl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002991 }
2992 }
2993}
2994
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002995
Steve Blocka7e24c12009-10-30 11:49:00 +00002996#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +00002997void MacroAssembler::DebugBreak() {
Steve Block9fac8402011-05-12 15:51:54 +01002998 Set(rax, 0); // No arguments.
Steve Block44f0eee2011-05-26 01:26:41 +01002999 LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
Andrei Popescu402d9372010-02-26 13:31:12 +00003000 CEntryStub ces(1);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003001 ASSERT(AllowThisStubCall(&ces));
Andrei Popescu402d9372010-02-26 13:31:12 +00003002 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Steve Blocka7e24c12009-10-30 11:49:00 +00003003}
Andrei Popescu402d9372010-02-26 13:31:12 +00003004#endif // ENABLE_DEBUGGER_SUPPORT
Steve Blocka7e24c12009-10-30 11:49:00 +00003005
3006
Ben Murdoch257744e2011-11-30 15:57:28 +00003007void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
3008 // This macro takes the dst register to make the code more readable
3009 // at the call sites. However, the dst register has to be rcx to
3010 // follow the calling convention which requires the call type to be
3011 // in rcx.
3012 ASSERT(dst.is(rcx));
3013 if (call_kind == CALL_AS_FUNCTION) {
3014 LoadSmiConstant(dst, Smi::FromInt(1));
3015 } else {
3016 LoadSmiConstant(dst, Smi::FromInt(0));
3017 }
3018}
3019
3020
Steve Blocka7e24c12009-10-30 11:49:00 +00003021void MacroAssembler::InvokeCode(Register code,
3022 const ParameterCount& expected,
3023 const ParameterCount& actual,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003024 InvokeFlag flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003025 const CallWrapper& call_wrapper,
3026 CallKind call_kind) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003027 // You can't call a function without a valid frame.
3028 ASSERT(flag == JUMP_FUNCTION || has_frame());
3029
Ben Murdoch257744e2011-11-30 15:57:28 +00003030 Label done;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003031 bool definitely_mismatches = false;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003032 InvokePrologue(expected,
3033 actual,
3034 Handle<Code>::null(),
3035 code,
3036 &done,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003037 &definitely_mismatches,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003038 flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003039 Label::kNear,
3040 call_wrapper,
3041 call_kind);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003042 if (!definitely_mismatches) {
3043 if (flag == CALL_FUNCTION) {
3044 call_wrapper.BeforeCall(CallSize(code));
3045 SetCallKind(rcx, call_kind);
3046 call(code);
3047 call_wrapper.AfterCall();
3048 } else {
3049 ASSERT(flag == JUMP_FUNCTION);
3050 SetCallKind(rcx, call_kind);
3051 jmp(code);
3052 }
3053 bind(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00003054 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003055}
3056
3057
3058void MacroAssembler::InvokeCode(Handle<Code> code,
3059 const ParameterCount& expected,
3060 const ParameterCount& actual,
3061 RelocInfo::Mode rmode,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003062 InvokeFlag flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003063 const CallWrapper& call_wrapper,
3064 CallKind call_kind) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003065 // You can't call a function without a valid frame.
3066 ASSERT(flag == JUMP_FUNCTION || has_frame());
3067
Ben Murdoch257744e2011-11-30 15:57:28 +00003068 Label done;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003069 bool definitely_mismatches = false;
Steve Blocka7e24c12009-10-30 11:49:00 +00003070 Register dummy = rax;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003071 InvokePrologue(expected,
3072 actual,
3073 code,
3074 dummy,
3075 &done,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003076 &definitely_mismatches,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003077 flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003078 Label::kNear,
3079 call_wrapper,
3080 call_kind);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003081 if (!definitely_mismatches) {
3082 if (flag == CALL_FUNCTION) {
3083 call_wrapper.BeforeCall(CallSize(code));
3084 SetCallKind(rcx, call_kind);
3085 Call(code, rmode);
3086 call_wrapper.AfterCall();
3087 } else {
3088 ASSERT(flag == JUMP_FUNCTION);
3089 SetCallKind(rcx, call_kind);
3090 Jump(code, rmode);
3091 }
3092 bind(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00003093 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003094}
3095
3096
3097void MacroAssembler::InvokeFunction(Register function,
3098 const ParameterCount& actual,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003099 InvokeFlag flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003100 const CallWrapper& call_wrapper,
3101 CallKind call_kind) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003102 // You can't call a function without a valid frame.
3103 ASSERT(flag == JUMP_FUNCTION || has_frame());
3104
Steve Blocka7e24c12009-10-30 11:49:00 +00003105 ASSERT(function.is(rdi));
3106 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
3107 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
3108 movsxlq(rbx,
3109 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003110 // Advances rdx to the end of the Code object header, to the start of
3111 // the executable code.
Steve Block791712a2010-08-27 10:21:07 +01003112 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003113
3114 ParameterCount expected(rbx);
Ben Murdoch257744e2011-11-30 15:57:28 +00003115 InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
Steve Blocka7e24c12009-10-30 11:49:00 +00003116}
3117
3118
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003119void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
Andrei Popescu402d9372010-02-26 13:31:12 +00003120 const ParameterCount& actual,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003121 InvokeFlag flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003122 const CallWrapper& call_wrapper,
3123 CallKind call_kind) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003124 // You can't call a function without a valid frame.
3125 ASSERT(flag == JUMP_FUNCTION || has_frame());
3126
Andrei Popescu402d9372010-02-26 13:31:12 +00003127 // Get the function and setup the context.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003128 LoadHeapObject(rdi, function);
Andrei Popescu402d9372010-02-26 13:31:12 +00003129 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
3130
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003131 // We call indirectly through the code field in the function to
3132 // allow recompilation to take effect without changing any of the
3133 // call sites.
3134 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
3135 ParameterCount expected(function->shared()->formal_parameter_count());
3136 InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
Ben Murdoch257744e2011-11-30 15:57:28 +00003137}
3138
3139
3140void MacroAssembler::InvokePrologue(const ParameterCount& expected,
3141 const ParameterCount& actual,
3142 Handle<Code> code_constant,
3143 Register code_register,
3144 Label* done,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003145 bool* definitely_mismatches,
Ben Murdoch257744e2011-11-30 15:57:28 +00003146 InvokeFlag flag,
3147 Label::Distance near_jump,
3148 const CallWrapper& call_wrapper,
3149 CallKind call_kind) {
3150 bool definitely_matches = false;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003151 *definitely_mismatches = false;
Ben Murdoch257744e2011-11-30 15:57:28 +00003152 Label invoke;
3153 if (expected.is_immediate()) {
3154 ASSERT(actual.is_immediate());
3155 if (expected.immediate() == actual.immediate()) {
3156 definitely_matches = true;
3157 } else {
3158 Set(rax, actual.immediate());
3159 if (expected.immediate() ==
3160 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
3161 // Don't worry about adapting arguments for built-ins that
3162        // don't want that done. Skip adaptation code by making it look
3163 // like we have a match between expected and actual number of
3164 // arguments.
3165 definitely_matches = true;
3166 } else {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003167 *definitely_mismatches = true;
Ben Murdoch257744e2011-11-30 15:57:28 +00003168 Set(rbx, expected.immediate());
3169 }
3170 }
3171 } else {
3172 if (actual.is_immediate()) {
3173 // Expected is in register, actual is immediate. This is the
3174 // case when we invoke function values without going through the
3175 // IC mechanism.
3176 cmpq(expected.reg(), Immediate(actual.immediate()));
3177 j(equal, &invoke, Label::kNear);
3178 ASSERT(expected.reg().is(rbx));
3179 Set(rax, actual.immediate());
3180 } else if (!expected.reg().is(actual.reg())) {
3181 // Both expected and actual are in (different) registers. This
3182 // is the case when we invoke functions using call and apply.
3183 cmpq(expected.reg(), actual.reg());
3184 j(equal, &invoke, Label::kNear);
3185 ASSERT(actual.reg().is(rax));
3186 ASSERT(expected.reg().is(rbx));
3187 }
3188 }
3189
3190 if (!definitely_matches) {
3191 Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
3192 if (!code_constant.is_null()) {
3193 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
3194 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
3195 } else if (!code_register.is(rdx)) {
3196 movq(rdx, code_register);
3197 }
3198
3199 if (flag == CALL_FUNCTION) {
3200 call_wrapper.BeforeCall(CallSize(adaptor));
3201 SetCallKind(rcx, call_kind);
3202 Call(adaptor, RelocInfo::CODE_TARGET);
3203 call_wrapper.AfterCall();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003204 if (!*definitely_mismatches) {
3205 jmp(done, near_jump);
3206 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003207 } else {
3208 SetCallKind(rcx, call_kind);
3209 Jump(adaptor, RelocInfo::CODE_TARGET);
3210 }
3211 bind(&invoke);
Steve Block1e0659c2011-05-24 12:43:12 +01003212 }
Andrei Popescu402d9372010-02-26 13:31:12 +00003213}
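
// Outcome summary for the prologue above (a sketch):
//   expected == actual, statically or at runtime  -> invoke directly
//   expected == kDontAdaptArgumentsSentinel       -> invoke directly
//   any other mismatch                            -> ArgumentsAdaptorTrampoline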
3214
3215
Steve Blocka7e24c12009-10-30 11:49:00 +00003216void MacroAssembler::EnterFrame(StackFrame::Type type) {
3217 push(rbp);
3218 movq(rbp, rsp);
3219 push(rsi); // Context.
Steve Block3ce2e202009-11-05 08:53:23 +00003220 Push(Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00003221 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
3222 push(kScratchRegister);
Steve Block44f0eee2011-05-26 01:26:41 +01003223 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003224 movq(kScratchRegister,
Ben Murdoch257744e2011-11-30 15:57:28 +00003225 isolate()->factory()->undefined_value(),
Steve Blocka7e24c12009-10-30 11:49:00 +00003226 RelocInfo::EMBEDDED_OBJECT);
3227 cmpq(Operand(rsp, 0), kScratchRegister);
3228 Check(not_equal, "code object not properly patched");
3229 }
3230}
3231
3232
3233void MacroAssembler::LeaveFrame(StackFrame::Type type) {
Steve Block44f0eee2011-05-26 01:26:41 +01003234 if (emit_debug_code()) {
Steve Block3ce2e202009-11-05 08:53:23 +00003235 Move(kScratchRegister, Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00003236 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
3237 Check(equal, "stack frame types must match");
3238 }
3239 movq(rsp, rbp);
3240 pop(rbp);
3241}
3242
3243
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003244void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003245 // Set up the frame structure on the stack.
Steve Blocka7e24c12009-10-30 11:49:00 +00003246 // All constants are relative to the frame pointer of the exit frame.
3247 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
3248 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
3249 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
3250 push(rbp);
3251 movq(rbp, rsp);
3252
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003253 // Reserve room for entry stack pointer and push the code object.
Steve Block3ce2e202009-11-05 08:53:23 +00003254 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +00003255 push(Immediate(0)); // Saved entry sp, patched before call.
3256 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
3257  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +00003258
3259 // Save the frame pointer and the context in top.
Ben Murdochbb769b22010-08-11 14:56:33 +01003260 if (save_rax) {
Steve Block44f0eee2011-05-26 01:26:41 +01003261 movq(r14, rax); // Backup rax in callee-save register.
Ben Murdochbb769b22010-08-11 14:56:33 +01003262 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003263
Ben Murdoch589d6972011-11-30 16:04:58 +00003264 Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
3265 Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
Ben Murdochbb769b22010-08-11 14:56:33 +01003266}
Steve Blocka7e24c12009-10-30 11:49:00 +00003267
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003268
Steve Block1e0659c2011-05-24 12:43:12 +01003269void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
3270 bool save_doubles) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003271#ifdef _WIN64
Steve Block1e0659c2011-05-24 12:43:12 +01003272 const int kShadowSpace = 4;
3273 arg_stack_space += kShadowSpace;
Steve Blocka7e24c12009-10-30 11:49:00 +00003274#endif
Steve Block1e0659c2011-05-24 12:43:12 +01003275 // Optionally save all XMM registers.
3276 if (save_doubles) {
Steve Block1e0659c2011-05-24 12:43:12 +01003277 int space = XMMRegister::kNumRegisters * kDoubleSize +
3278 arg_stack_space * kPointerSize;
3279 subq(rsp, Immediate(space));
3280 int offset = -2 * kPointerSize;
3281 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
3282 XMMRegister reg = XMMRegister::FromAllocationIndex(i);
3283 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
3284 }
3285 } else if (arg_stack_space > 0) {
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003286 subq(rsp, Immediate(arg_stack_space * kPointerSize));
3287 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003288
3289 // Get the required frame alignment for the OS.
Steve Block44f0eee2011-05-26 01:26:41 +01003290 const int kFrameAlignment = OS::ActivationFrameAlignment();
Steve Blocka7e24c12009-10-30 11:49:00 +00003291 if (kFrameAlignment > 0) {
3292 ASSERT(IsPowerOf2(kFrameAlignment));
Ben Murdoch8b112d22011-06-08 16:22:53 +01003293 ASSERT(is_int8(kFrameAlignment));
3294 and_(rsp, Immediate(-kFrameAlignment));
Steve Blocka7e24c12009-10-30 11:49:00 +00003295 }
3296
3297 // Patch the saved entry sp.
3298 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
3299}
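
// Resulting exit frame layout, as a sketch (byte offsets from rbp, matching
// the ExitFrameConstants asserted in EnterExitFramePrologue above):
//
//   rbp + 16       : caller SP (first stack argument)
//   rbp +  8       : return address
//   rbp +  0       : saved rbp
//   rbp -  8       : entry sp slot (patched with the final rsp above)
//   rbp - 16       : code object
//   rbp - 24 - 8*i : saved XMM register i, when save_doubles
//   below that     : arg_stack_space argument slots, rsp aligned down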
3300
3301
Steve Block1e0659c2011-05-24 12:43:12 +01003302void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003303 EnterExitFramePrologue(true);
Ben Murdochbb769b22010-08-11 14:56:33 +01003304
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003305 // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
Ben Murdochbb769b22010-08-11 14:56:33 +01003306 // so it must be retained across the C-call.
3307 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
Steve Block44f0eee2011-05-26 01:26:41 +01003308 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
Ben Murdochbb769b22010-08-11 14:56:33 +01003309
Steve Block1e0659c2011-05-24 12:43:12 +01003310 EnterExitFrameEpilogue(arg_stack_space, save_doubles);
Ben Murdochbb769b22010-08-11 14:56:33 +01003311}
3312
3313
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003314void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003315 EnterExitFramePrologue(false);
Steve Block1e0659c2011-05-24 12:43:12 +01003316 EnterExitFrameEpilogue(arg_stack_space, false);
Ben Murdochbb769b22010-08-11 14:56:33 +01003317}
3318
3319
Steve Block1e0659c2011-05-24 12:43:12 +01003320void MacroAssembler::LeaveExitFrame(bool save_doubles) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003321 // Registers:
Steve Block44f0eee2011-05-26 01:26:41 +01003322 // r15 : argv
Steve Block1e0659c2011-05-24 12:43:12 +01003323 if (save_doubles) {
3324 int offset = -2 * kPointerSize;
3325 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
3326 XMMRegister reg = XMMRegister::FromAllocationIndex(i);
3327 movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
3328 }
3329 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003330 // Get the return address from the stack and restore the frame pointer.
3331 movq(rcx, Operand(rbp, 1 * kPointerSize));
3332 movq(rbp, Operand(rbp, 0 * kPointerSize));
3333
Steve Block1e0659c2011-05-24 12:43:12 +01003334 // Drop everything up to and including the arguments and the receiver
Steve Blocka7e24c12009-10-30 11:49:00 +00003335 // from the caller stack.
Steve Block44f0eee2011-05-26 01:26:41 +01003336 lea(rsp, Operand(r15, 1 * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00003337
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003338 // Push the return address to get ready to return.
3339 push(rcx);
3340
3341 LeaveExitFrameEpilogue();
3342}
3343
3344
3345void MacroAssembler::LeaveApiExitFrame() {
3346 movq(rsp, rbp);
3347 pop(rbp);
3348
3349 LeaveExitFrameEpilogue();
3350}
3351
3352
3353void MacroAssembler::LeaveExitFrameEpilogue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00003354 // Restore current context from top and clear it in debug mode.
Ben Murdoch589d6972011-11-30 16:04:58 +00003355 ExternalReference context_address(Isolate::kContextAddress, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01003356 Operand context_operand = ExternalOperand(context_address);
3357 movq(rsi, context_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003358#ifdef DEBUG
Steve Block44f0eee2011-05-26 01:26:41 +01003359 movq(context_operand, Immediate(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00003360#endif
3361
Steve Blocka7e24c12009-10-30 11:49:00 +00003362 // Clear the top frame.
Ben Murdoch589d6972011-11-30 16:04:58 +00003363 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
Steve Block44f0eee2011-05-26 01:26:41 +01003364 isolate());
3365 Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
3366 movq(c_entry_fp_operand, Immediate(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00003367}
3368
3369
Steve Blocka7e24c12009-10-30 11:49:00 +00003370void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
3371 Register scratch,
3372 Label* miss) {
3373 Label same_contexts;
3374
3375 ASSERT(!holder_reg.is(scratch));
3376 ASSERT(!scratch.is(kScratchRegister));
3377 // Load current lexical context from the stack frame.
3378 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
3379
3380 // When generating debug code, make sure the lexical context is set.
Steve Block44f0eee2011-05-26 01:26:41 +01003381 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003382 cmpq(scratch, Immediate(0));
3383 Check(not_equal, "we should not have an empty lexical context");
3384 }
3385 // Load the global context of the current context.
3386 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
3387 movq(scratch, FieldOperand(scratch, offset));
3388 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
3389
3390 // Check the context is a global context.
Steve Block44f0eee2011-05-26 01:26:41 +01003391 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003392 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
Ben Murdoch257744e2011-11-30 15:57:28 +00003393 isolate()->factory()->global_context_map());
Steve Blocka7e24c12009-10-30 11:49:00 +00003394 Check(equal, "JSGlobalObject::global_context should be a global context.");
3395 }
3396
3397 // Check if both contexts are the same.
3398 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
3399 j(equal, &same_contexts);
3400
3401 // Compare security tokens.
3402 // Check that the security token in the calling global object is
3403 // compatible with the security token in the receiving global
3404 // object.
3405
3406 // Check the context is a global context.
Steve Block44f0eee2011-05-26 01:26:41 +01003407 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003408 // Preserve original value of holder_reg.
3409 push(holder_reg);
3410 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
3411 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
3412 Check(not_equal, "JSGlobalProxy::context() should not be null.");
3413
3414    // Read the first word and compare to global_context_map().
3415 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
3416 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
3417 Check(equal, "JSGlobalObject::global_context should be a global context.");
3418 pop(holder_reg);
3419 }
3420
3421 movq(kScratchRegister,
3422 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00003423 int token_offset =
3424 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00003425 movq(scratch, FieldOperand(scratch, token_offset));
3426 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
3427 j(not_equal, miss);
3428
3429 bind(&same_contexts);
3430}
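
// Sketch of the access check above: access is granted when the caller's
// global context is the holder's context, or when the two contexts carry
// the same security token.
//
//   if (caller_global_context == holder_context) return;  // same origin
//   if (caller_token != holder_token) goto miss;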
3431
3432
Ben Murdochc7cc0282012-03-05 14:35:55 +00003433void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
3434 // First of all we assign the hash seed to scratch.
3435 LoadRoot(scratch, Heap::kHashSeedRootIndex);
3436 SmiToInteger32(scratch, scratch);
3437
3438 // Xor original key with a seed.
3439 xorl(r0, scratch);
3440
3441 // Compute the hash code from the untagged key. This must be kept in sync
3442 // with ComputeIntegerHash in utils.h.
3443 //
3444 // hash = ~hash + (hash << 15);
3445 movl(scratch, r0);
3446 notl(r0);
3447 shll(scratch, Immediate(15));
3448 addl(r0, scratch);
3449 // hash = hash ^ (hash >> 12);
3450 movl(scratch, r0);
3451 shrl(scratch, Immediate(12));
3452 xorl(r0, scratch);
3453 // hash = hash + (hash << 2);
3454 leal(r0, Operand(r0, r0, times_4, 0));
3455 // hash = hash ^ (hash >> 4);
3456 movl(scratch, r0);
3457 shrl(scratch, Immediate(4));
3458 xorl(r0, scratch);
3459 // hash = hash * 2057;
3460 imull(r0, r0, Immediate(2057));
3461 // hash = hash ^ (hash >> 16);
3462 movl(scratch, r0);
3463 shrl(scratch, Immediate(16));
3464 xorl(r0, scratch);
3465}
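
// The same computation in C, per the "kept in sync with ComputeIntegerHash
// in utils.h" note above (a sketch; the helper name is illustrative):
//
//   uint32_t ComputeSeededIntegerHash(uint32_t key, uint32_t seed) {
//     uint32_t hash = key ^ seed;
//     hash = ~hash + (hash << 15);
//     hash = hash ^ (hash >> 12);
//     hash = hash + (hash << 2);
//     hash = hash ^ (hash >> 4);
//     hash = hash * 2057;
//     hash = hash ^ (hash >> 16);
//     return hash;
//   }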
3466
3467
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003469void MacroAssembler::LoadFromNumberDictionary(Label* miss,
3470 Register elements,
3471 Register key,
3472 Register r0,
3473 Register r1,
3474 Register r2,
3475 Register result) {
3476 // Register use:
3477 //
3478 // elements - holds the slow-case elements of the receiver on entry.
3479 // Unchanged unless 'result' is the same register.
3480 //
3481 // key - holds the smi key on entry.
3482 // Unchanged unless 'result' is the same register.
3483 //
3484 // Scratch registers:
3485 //
3486 // r0 - holds the untagged key on entry and holds the hash once computed.
3487 //
3488 // r1 - used to hold the capacity mask of the dictionary
3489 //
3490 // r2 - used for the index into the dictionary.
3491 //
3492 // result - holds the result on exit if the load succeeded.
3493 // Allowed to be the same as 'key' or 'result'.
3494 // Unchanged on bailout so 'key' or 'result' can be used
3495 // in further computation.
3496
3497 Label done;
3498
Ben Murdochc7cc0282012-03-05 14:35:55 +00003499 GetNumberHash(r0, r1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003500
3501 // Compute capacity mask.
Ben Murdochc7cc0282012-03-05 14:35:55 +00003502 SmiToInteger32(r1, FieldOperand(elements,
3503 SeededNumberDictionary::kCapacityOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003504 decl(r1);
3505
3506 // Generate an unrolled loop that performs a few probes before giving up.
3507 const int kProbes = 4;
3508 for (int i = 0; i < kProbes; i++) {
3509 // Use r2 for index calculations and keep the hash intact in r0.
3510 movq(r2, r0);
3511 // Compute the masked index: (hash + i + i * i) & mask.
3512 if (i > 0) {
Ben Murdochc7cc0282012-03-05 14:35:55 +00003513 addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003514 }
3515 and_(r2, r1);
3516
3517 // Scale the index by multiplying by the entry size.
Ben Murdochc7cc0282012-03-05 14:35:55 +00003518 ASSERT(SeededNumberDictionary::kEntrySize == 3);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003519 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
3520
3521 // Check if the key matches.
3522 cmpq(key, FieldOperand(elements,
3523 r2,
3524 times_pointer_size,
Ben Murdochc7cc0282012-03-05 14:35:55 +00003525 SeededNumberDictionary::kElementsStartOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003526 if (i != (kProbes - 1)) {
3527 j(equal, &done);
3528 } else {
3529 j(not_equal, miss);
3530 }
3531 }
3532
3533 bind(&done);
3534  // Check that the value is a normal property.
3535 const int kDetailsOffset =
Ben Murdochc7cc0282012-03-05 14:35:55 +00003536 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003537 ASSERT_EQ(NORMAL, 0);
3538 Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
Ben Murdoch589d6972011-11-30 16:04:58 +00003539 Smi::FromInt(PropertyDetails::TypeField::kMask));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003540 j(not_zero, miss);
3541
3542 // Get the value at the masked, scaled index.
3543 const int kValueOffset =
Ben Murdochc7cc0282012-03-05 14:35:55 +00003544 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003545 movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
3546}
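
// Sketch of the probing scheme above in C (illustrative; entries are three
// words wide - key, value, details - as asserted via kEntrySize == 3):
//
//   for (int i = 0; i < kProbes; i++) {
//     uint32_t index = hash;
//     if (i > 0) index += SeededNumberDictionary::GetProbeOffset(i);
//     index &= capacity - 1;
//     Entry* entry = &elements_start[index];
//     if (entry->key == key) {
//       // check entry->details marks a NORMAL property, then:
//       return entry->value;
//     }
//   }
//   // no probe matched: jump to the miss label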
3547
3548
Steve Blocka7e24c12009-10-30 11:49:00 +00003549void MacroAssembler::LoadAllocationTopHelper(Register result,
Steve Blocka7e24c12009-10-30 11:49:00 +00003550 Register scratch,
3551 AllocationFlags flags) {
3552 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01003553 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00003554
3555 // Just return if allocation top is already known.
3556 if ((flags & RESULT_CONTAINS_TOP) != 0) {
3557 // No use of scratch if allocation top is provided.
Steve Block6ded16b2010-05-10 14:33:55 +01003558 ASSERT(!scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00003559#ifdef DEBUG
3560 // Assert that result actually contains top on entry.
Steve Block44f0eee2011-05-26 01:26:41 +01003561 Operand top_operand = ExternalOperand(new_space_allocation_top);
3562 cmpq(result, top_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003563 Check(equal, "Unexpected allocation top");
3564#endif
3565 return;
3566 }
3567
Steve Block6ded16b2010-05-10 14:33:55 +01003568 // Move address of new object to result. Use scratch register if available,
3569 // and keep address in scratch until call to UpdateAllocationTopHelper.
3570 if (scratch.is_valid()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003571 LoadAddress(scratch, new_space_allocation_top);
Steve Blocka7e24c12009-10-30 11:49:00 +00003572 movq(result, Operand(scratch, 0));
Steve Block6ded16b2010-05-10 14:33:55 +01003573 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003574 Load(result, new_space_allocation_top);
Steve Blocka7e24c12009-10-30 11:49:00 +00003575 }
3576}
3577
3578
3579void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
3580 Register scratch) {
Steve Block44f0eee2011-05-26 01:26:41 +01003581 if (emit_debug_code()) {
Steve Blockd0582a62009-12-15 09:54:21 +00003582 testq(result_end, Immediate(kObjectAlignmentMask));
3583 Check(zero, "Unaligned allocation in new space");
3584 }
3585
Steve Blocka7e24c12009-10-30 11:49:00 +00003586 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01003587 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00003588
3589 // Update new top.
Steve Block44f0eee2011-05-26 01:26:41 +01003590 if (scratch.is_valid()) {
3591 // Scratch already contains address of allocation top.
3592 movq(Operand(scratch, 0), result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00003593 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003594 Store(new_space_allocation_top, result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00003595 }
3596}
3597
3598
3599void MacroAssembler::AllocateInNewSpace(int object_size,
3600 Register result,
3601 Register result_end,
3602 Register scratch,
3603 Label* gc_required,
3604 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07003605 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01003606 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07003607 // Trash the registers to simulate an allocation failure.
3608 movl(result, Immediate(0x7091));
3609 if (result_end.is_valid()) {
3610 movl(result_end, Immediate(0x7191));
3611 }
3612 if (scratch.is_valid()) {
3613 movl(scratch, Immediate(0x7291));
3614 }
3615 }
3616 jmp(gc_required);
3617 return;
3618 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003619 ASSERT(!result.is(result_end));
3620
3621 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003622 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00003623
3624 // Calculate new top and bail out if new space is exhausted.
3625 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01003626 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Block6ded16b2010-05-10 14:33:55 +01003627
3628 Register top_reg = result_end.is_valid() ? result_end : result;
3629
Steve Block1e0659c2011-05-24 12:43:12 +01003630 if (!top_reg.is(result)) {
3631 movq(top_reg, result);
Steve Block6ded16b2010-05-10 14:33:55 +01003632 }
Steve Block1e0659c2011-05-24 12:43:12 +01003633 addq(top_reg, Immediate(object_size));
3634 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01003635 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
3636 cmpq(top_reg, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003637 j(above, gc_required);
3638
3639 // Update allocation top.
Steve Block6ded16b2010-05-10 14:33:55 +01003640 UpdateAllocationTopHelper(top_reg, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00003641
Steve Block6ded16b2010-05-10 14:33:55 +01003642 if (top_reg.is(result)) {
3643 if ((flags & TAG_OBJECT) != 0) {
3644 subq(result, Immediate(object_size - kHeapObjectTag));
3645 } else {
3646 subq(result, Immediate(object_size));
3647 }
3648 } else if ((flags & TAG_OBJECT) != 0) {
3649 // Tag the result if requested.
Steve Blocka7e24c12009-10-30 11:49:00 +00003650 addq(result, Immediate(kHeapObjectTag));
3651 }
3652}
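
// The fast path above is plain bump-pointer allocation. Roughly (a sketch
// that ignores the top_reg/scratch register shuffling):
//
//   byte* result = new_space_allocation_top;
//   byte* new_top = result + object_size;  // j(carry, ...) catches overflow
//   if (new_top > new_space_allocation_limit) goto gc_required;
//   new_space_allocation_top = new_top;
//   if (flags & TAG_OBJECT) result += kHeapObjectTag;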
3653
3654
3655void MacroAssembler::AllocateInNewSpace(int header_size,
3656 ScaleFactor element_size,
3657 Register element_count,
3658 Register result,
3659 Register result_end,
3660 Register scratch,
3661 Label* gc_required,
3662 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07003663 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01003664 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07003665 // Trash the registers to simulate an allocation failure.
3666 movl(result, Immediate(0x7091));
3667 movl(result_end, Immediate(0x7191));
3668 if (scratch.is_valid()) {
3669 movl(scratch, Immediate(0x7291));
3670 }
3671 // Register element_count is not modified by the function.
3672 }
3673 jmp(gc_required);
3674 return;
3675 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003676 ASSERT(!result.is(result_end));
3677
3678 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003679 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00003680
3681 // Calculate new top and bail out if new space is exhausted.
3682 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01003683 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Block1e0659c2011-05-24 12:43:12 +01003684
3685 // We assume that element_count*element_size + header_size does not
3686 // overflow.
3687 lea(result_end, Operand(element_count, element_size, header_size));
3688 addq(result_end, result);
3689 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01003690 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
3691 cmpq(result_end, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003692 j(above, gc_required);
3693
3694 // Update allocation top.
3695 UpdateAllocationTopHelper(result_end, scratch);
3696
3697 // Tag the result if requested.
3698 if ((flags & TAG_OBJECT) != 0) {
3699 addq(result, Immediate(kHeapObjectTag));
3700 }
3701}
3702
3703
3704void MacroAssembler::AllocateInNewSpace(Register object_size,
3705 Register result,
3706 Register result_end,
3707 Register scratch,
3708 Label* gc_required,
3709 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07003710 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01003711 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07003712 // Trash the registers to simulate an allocation failure.
3713 movl(result, Immediate(0x7091));
3714 movl(result_end, Immediate(0x7191));
3715 if (scratch.is_valid()) {
3716 movl(scratch, Immediate(0x7291));
3717 }
3718 // object_size is left unchanged by this function.
3719 }
3720 jmp(gc_required);
3721 return;
3722 }
3723 ASSERT(!result.is(result_end));
3724
Steve Blocka7e24c12009-10-30 11:49:00 +00003725 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003726 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00003727
3728 // Calculate new top and bail out if new space is exhausted.
3729 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01003730 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00003731 if (!object_size.is(result_end)) {
3732 movq(result_end, object_size);
3733 }
3734 addq(result_end, result);
Steve Block1e0659c2011-05-24 12:43:12 +01003735 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01003736 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
3737 cmpq(result_end, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003738 j(above, gc_required);
3739
3740 // Update allocation top.
3741 UpdateAllocationTopHelper(result_end, scratch);
3742
3743 // Tag the result if requested.
3744 if ((flags & TAG_OBJECT) != 0) {
3745 addq(result, Immediate(kHeapObjectTag));
3746 }
3747}
3748
3749
3750void MacroAssembler::UndoAllocationInNewSpace(Register object) {
3751 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01003752 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00003753
3754 // Make sure the object has no tag before resetting top.
3755 and_(object, Immediate(~kHeapObjectTagMask));
Steve Block44f0eee2011-05-26 01:26:41 +01003756 Operand top_operand = ExternalOperand(new_space_allocation_top);
Steve Blocka7e24c12009-10-30 11:49:00 +00003757#ifdef DEBUG
Steve Block44f0eee2011-05-26 01:26:41 +01003758 cmpq(object, top_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003759 Check(below, "Undo allocation of non allocated memory");
3760#endif
Steve Block44f0eee2011-05-26 01:26:41 +01003761 movq(top_operand, object);
Steve Blocka7e24c12009-10-30 11:49:00 +00003762}
3763
3764
Steve Block3ce2e202009-11-05 08:53:23 +00003765void MacroAssembler::AllocateHeapNumber(Register result,
3766 Register scratch,
3767 Label* gc_required) {
3768 // Allocate heap number in new space.
3769 AllocateInNewSpace(HeapNumber::kSize,
3770 result,
3771 scratch,
3772 no_reg,
3773 gc_required,
3774 TAG_OBJECT);
3775
3776 // Set the map.
3777 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
3778 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3779}
3780
3781
Leon Clarkee46be812010-01-19 14:06:41 +00003782void MacroAssembler::AllocateTwoByteString(Register result,
3783 Register length,
3784 Register scratch1,
3785 Register scratch2,
3786 Register scratch3,
3787 Label* gc_required) {
3788 // Calculate the number of bytes needed for the characters in the string while
3789 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01003790 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
3791 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00003792 ASSERT(kShortSize == 2);
3793 // scratch1 = length * 2 + kObjectAlignmentMask.
Steve Block6ded16b2010-05-10 14:33:55 +01003794 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
3795 kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00003796 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01003797 if (kHeaderAlignment > 0) {
3798 subq(scratch1, Immediate(kHeaderAlignment));
3799 }
Leon Clarkee46be812010-01-19 14:06:41 +00003800
3801 // Allocate two byte string in new space.
3802 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
3803 times_1,
3804 scratch1,
3805 result,
3806 scratch2,
3807 scratch3,
3808 gc_required,
3809 TAG_OBJECT);
3810
3811 // Set the map, length and hash field.
3812 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
3813 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01003814 Integer32ToSmi(scratch1, length);
3815 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003816 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00003817 Immediate(String::kEmptyHashField));
3818}
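
// Net effect of the lea/and/sub sequence above (a sketch): the total object
// size is the header plus two bytes per character, rounded up to object
// alignment.
//
//   int size = SeqTwoByteString::kHeaderSize + 2 * length;  // kShortSize == 2
//   size = (size + kObjectAlignmentMask) & ~kObjectAlignmentMask;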
3819
3820
3821void MacroAssembler::AllocateAsciiString(Register result,
3822 Register length,
3823 Register scratch1,
3824 Register scratch2,
3825 Register scratch3,
3826 Label* gc_required) {
3827 // Calculate the number of bytes needed for the characters in the string while
3828 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01003829 const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
3830 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00003831 movl(scratch1, length);
3832 ASSERT(kCharSize == 1);
Steve Block6ded16b2010-05-10 14:33:55 +01003833 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00003834 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01003835 if (kHeaderAlignment > 0) {
3836 subq(scratch1, Immediate(kHeaderAlignment));
3837 }
Leon Clarkee46be812010-01-19 14:06:41 +00003838
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003839 // Allocate ASCII string in new space.
Leon Clarkee46be812010-01-19 14:06:41 +00003840 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
3841 times_1,
3842 scratch1,
3843 result,
3844 scratch2,
3845 scratch3,
3846 gc_required,
3847 TAG_OBJECT);
3848
3849 // Set the map, length and hash field.
3850 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
3851 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01003852 Integer32ToSmi(scratch1, length);
3853 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003854 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00003855 Immediate(String::kEmptyHashField));
3856}
3857
3858
Ben Murdoch589d6972011-11-30 16:04:58 +00003859void MacroAssembler::AllocateTwoByteConsString(Register result,
Leon Clarkee46be812010-01-19 14:06:41 +00003860 Register scratch1,
3861 Register scratch2,
3862 Label* gc_required) {
3863  // Allocate cons string in new space.
3864 AllocateInNewSpace(ConsString::kSize,
3865 result,
3866 scratch1,
3867 scratch2,
3868 gc_required,
3869 TAG_OBJECT);
3870
3871 // Set the map. The other fields are left uninitialized.
3872 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
3873 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3874}
3875
3876
3877void MacroAssembler::AllocateAsciiConsString(Register result,
3878 Register scratch1,
3879 Register scratch2,
3880 Label* gc_required) {
3881  // Allocate cons string in new space.
3882 AllocateInNewSpace(ConsString::kSize,
3883 result,
3884 scratch1,
3885 scratch2,
3886 gc_required,
3887 TAG_OBJECT);
3888
3889 // Set the map. The other fields are left uninitialized.
3890 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
3891 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3892}
3893
3894
Ben Murdoch589d6972011-11-30 16:04:58 +00003895void MacroAssembler::AllocateTwoByteSlicedString(Register result,
3896 Register scratch1,
3897 Register scratch2,
3898 Label* gc_required) {
3899  // Allocate sliced string in new space.
3900 AllocateInNewSpace(SlicedString::kSize,
3901 result,
3902 scratch1,
3903 scratch2,
3904 gc_required,
3905 TAG_OBJECT);
3906
3907 // Set the map. The other fields are left uninitialized.
3908 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
3909 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3910}
3911
3912
3913void MacroAssembler::AllocateAsciiSlicedString(Register result,
3914 Register scratch1,
3915 Register scratch2,
3916 Label* gc_required) {
3917  // Allocate sliced string in new space.
3918 AllocateInNewSpace(SlicedString::kSize,
3919 result,
3920 scratch1,
3921 scratch2,
3922 gc_required,
3923 TAG_OBJECT);
3924
3925 // Set the map. The other fields are left uninitialized.
3926 LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex);
3927 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3928}
3929
3930
Steve Block44f0eee2011-05-26 01:26:41 +01003931// Copy memory, byte-by-byte, from source to destination. Not optimized for
3932// long or aligned copies. The contents of scratch and length are destroyed.
3933// Destination is incremented by length; source, length, and scratch are
3934// clobbered.
3935// A simpler loop is faster on small copies, but slower on large ones.
3936// The cld() instruction must have been emitted, to clear the direction flag,
3937// before calling this function.
3938void MacroAssembler::CopyBytes(Register destination,
3939 Register source,
3940 Register length,
3941 int min_length,
3942 Register scratch) {
3943 ASSERT(min_length >= 0);
3944 if (FLAG_debug_code) {
3945 cmpl(length, Immediate(min_length));
3946 Assert(greater_equal, "Invalid min_length");
3947 }
3948 Label loop, done, short_string, short_loop;
3949
3950 const int kLongStringLimit = 20;
3951 if (min_length <= kLongStringLimit) {
3952 cmpl(length, Immediate(kLongStringLimit));
3953 j(less_equal, &short_string);
3954 }
3955
3956 ASSERT(source.is(rsi));
3957 ASSERT(destination.is(rdi));
3958 ASSERT(length.is(rcx));
3959
3960 // Because source is 8-byte aligned in our uses of this function,
3961 // we keep source aligned for the rep movs operation by copying the odd bytes
3962 // at the end of the ranges.
3963 movq(scratch, length);
3964 shrl(length, Immediate(3));
3965 repmovsq();
3966 // Move remaining bytes of length.
3967 andl(scratch, Immediate(0x7));
3968 movq(length, Operand(source, scratch, times_1, -8));
3969 movq(Operand(destination, scratch, times_1, -8), length);
3970 addq(destination, scratch);
3971
3972 if (min_length <= kLongStringLimit) {
3973 jmp(&done);
3974
3975 bind(&short_string);
3976 if (min_length == 0) {
3977 testl(length, length);
3978 j(zero, &done);
3979 }
3980 lea(scratch, Operand(destination, length, times_1, 0));
3981
3982 bind(&short_loop);
3983 movb(length, Operand(source, 0));
3984 movb(Operand(destination, 0), length);
3985 incq(source);
3986 incq(destination);
3987 cmpq(destination, scratch);
3988 j(not_equal, &short_loop);
3989
3990 bind(&done);
3991 }
3992}
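
// Sketch of the long-copy path above: rep movsq copies length / 8 qwords,
// then a single overlapping 8-byte load/store covers the 0-7 remaining
// bytes. The overlap is safe because this path only runs for length > 20.
//
//   memcpy(dst, src, (len >> 3) << 3);  // repmovsq
//   uint64_t tail;
//   memcpy(&tail, src + len - 8, 8);    // last 8 bytes of the source range
//   memcpy(dst + len - 8, &tail, 8);    // may rewrite up to 7 copied bytes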
3993
3994
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003995void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
3996 Register end_offset,
3997 Register filler) {
3998 Label loop, entry;
3999 jmp(&entry);
4000 bind(&loop);
4001 movq(Operand(start_offset, 0), filler);
4002 addq(start_offset, Immediate(kPointerSize));
4003 bind(&entry);
4004 cmpq(start_offset, end_offset);
4005 j(less, &loop);
4006}
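
// Equivalent C loop (a sketch): fill the half-open range
// [start_offset, end_offset) one pointer-sized word at a time.
//
//   for (byte* p = start_offset; p < end_offset; p += kPointerSize) {
//     *reinterpret_cast<Object**>(p) = filler;
//   }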
4007
4008
Steve Blockd0582a62009-12-15 09:54:21 +00004009void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
4010 if (context_chain_length > 0) {
4011 // Move up the chain of contexts to the context containing the slot.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004012 movq(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Blockd0582a62009-12-15 09:54:21 +00004013 for (int i = 1; i < context_chain_length; i++) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004014 movq(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Blockd0582a62009-12-15 09:54:21 +00004015 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004016 } else {
4017 // Slot is in the current function context. Move it into the
4018 // destination register in case we store into it (the write barrier
4019 // cannot be allowed to destroy the context in rsi).
4020 movq(dst, rsi);
4021 }
4022
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004023 // We should not have found a with context by walking the context
4024 // chain (i.e., the static scope chain and runtime context chain do
4025 // not agree). A variable occurring in such a scope should have
4026 // slot type LOOKUP and not CONTEXT.
Steve Block44f0eee2011-05-26 01:26:41 +01004027 if (emit_debug_code()) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004028 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
4029 Heap::kWithContextMapRootIndex);
4030 Check(not_equal, "Variable resolved to with context.");
Steve Blockd0582a62009-12-15 09:54:21 +00004031 }
4032}
4033
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004034
4035void MacroAssembler::LoadTransitionedArrayMapConditional(
4036 ElementsKind expected_kind,
4037 ElementsKind transitioned_kind,
4038 Register map_in_out,
4039 Register scratch,
4040 Label* no_map_match) {
4041 // Load the global or builtins object from the current context.
4042 movq(scratch, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
4043 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
4044
4045 // Check that the function's map is the same as the expected cached map.
4046 int expected_index =
4047 Context::GetContextMapIndexFromElementsKind(expected_kind);
4048 cmpq(map_in_out, Operand(scratch, Context::SlotOffset(expected_index)));
4049 j(not_equal, no_map_match);
4050
4051 // Use the transitioned cached map.
4052 int trans_index =
4053 Context::GetContextMapIndexFromElementsKind(transitioned_kind);
4054 movq(map_in_out, Operand(scratch, Context::SlotOffset(trans_index)));
4055}
4056
4057
4058void MacroAssembler::LoadInitialArrayMap(
4059 Register function_in, Register scratch, Register map_out) {
4060 ASSERT(!function_in.is(map_out));
4061 Label done;
4062 movq(map_out, FieldOperand(function_in,
4063 JSFunction::kPrototypeOrInitialMapOffset));
4064 if (!FLAG_smi_only_arrays) {
4065 LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
4066 FAST_ELEMENTS,
4067 map_out,
4068 scratch,
4069 &done);
4070 }
4071 bind(&done);
4072}
4073
Steve Block44f0eee2011-05-26 01:26:41 +01004074#ifdef _WIN64
4075static const int kRegisterPassedArguments = 4;
4076#else
4077static const int kRegisterPassedArguments = 6;
4078#endif
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004079
Ben Murdochb0fe1622011-05-05 13:52:32 +01004080void MacroAssembler::LoadGlobalFunction(int index, Register function) {
4081 // Load the global or builtins object from the current context.
4082 movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
4083 // Load the global context from the global or builtins object.
4084 movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
4085 // Load the function from the global context.
4086 movq(function, Operand(function, Context::SlotOffset(index)));
4087}
4088
4089
4090void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
4091 Register map) {
4092 // Load the initial map. The global functions all have initial maps.
4093 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01004094 if (emit_debug_code()) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01004095 Label ok, fail;
Ben Murdoch257744e2011-11-30 15:57:28 +00004096 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004097 jmp(&ok);
4098 bind(&fail);
4099 Abort("Global functions must have initial map");
4100 bind(&ok);
4101 }
4102}
4103
4104
Leon Clarke4515c472010-02-03 11:58:03 +00004105int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004106 // On Windows 64 stack slots are reserved by the caller for all arguments
4107 // including the ones passed in registers, and space is always allocated for
4108 // the four register arguments even if the function takes fewer than four
4109 // arguments.
4110 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
4111 // and the caller does not reserve stack slots for them.
Leon Clarke4515c472010-02-03 11:58:03 +00004112 ASSERT(num_arguments >= 0);
4113#ifdef _WIN64
Steve Block44f0eee2011-05-26 01:26:41 +01004114 const int kMinimumStackSlots = kRegisterPassedArguments;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004115 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
4116 return num_arguments;
Leon Clarke4515c472010-02-03 11:58:03 +00004117#else
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004118 if (num_arguments < kRegisterPassedArguments) return 0;
4119 return num_arguments - kRegisterPassedArguments;
Leon Clarke4515c472010-02-03 11:58:03 +00004120#endif
Leon Clarke4515c472010-02-03 11:58:03 +00004121}
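
// Worked examples for the two ABI branches above (a sketch):
//
//   ArgumentStackSlotsForCFunctionCall(3);  // Win64: 4 (shadow space), SysV: 0
//   ArgumentStackSlotsForCFunctionCall(6);  // Win64: 6, SysV: 0
//   ArgumentStackSlotsForCFunctionCall(8);  // Win64: 8, SysV: 2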
4122

void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
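  // For example, with four arguments on Win64 this reserves 4 + 1 == 5 slots
  // below the aligned rsp and saves the incoming rsp in the highest one, so
  // that CallCFunction can restore it after the call.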
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
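  // Restore the rsp value that PrepareCallCFunction stashed in the slot just
  // above the reserved argument area, undoing the alignment in one move.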
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}


bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
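
// Usage sketch (assuming the masm() accessor this class declares):
//   CodePatcher patcher(address, 1);
//   patcher.masm()->int3();  // Overwrite exactly one byte of old code.
// The destructor above then flushes the instruction cache and verifies that
// exactly `size` bytes were emitted.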


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
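  // Masking off the low kPageAlignmentMask bits of the object's address
  // yields the start of its page, where the MemoryChunk header (and thus the
  // flags word tested below) lives.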
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    movq(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register bitmap_scratch,
                                 Register mask_scratch,
                                 Label* on_black,
                                 Label::Distance on_black_distance) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(object, bitmap_scratch, mask_scratch);

  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  // The mask_scratch register contains a 1 at the position of the first bit
  // and a 0 at all other positions, including the position of the second bit.
  movq(rcx, mask_scratch);
  // Make rcx into a mask that covers both marking bits using the operation
  // rcx = mask | (mask << 1).
  lea(rcx, Operand(mask_scratch, mask_scratch, times_2, 0));
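  // (The lea computes mask + mask * 2 == mask * 3, which equals
  // mask | (mask << 1) because mask_scratch has exactly one bit set.)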
  // Note that we are using a 4-byte aligned 8-byte load.
  and_(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  cmpq(mask_scratch, rcx);
  j(equal, on_black, on_black_distance);
}


// Detect some, but not all, common pointer-free objects. This is used by the
// incremental write barrier which doesn't care about oddballs (they are always
// marked black immediately so this code is not hit).
void MacroAssembler::JumpIfDataObject(
    Register value,
    Register scratch,
    Label* not_data_object,
    Label::Distance not_data_object_distance) {
  Label is_data_object;
  movq(scratch, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
  j(equal, &is_data_object, Label::kNear);
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
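  // A single testb covers both conditions: non-strings have the 0x80 bit set
  // and indirect (cons or sliced) strings have the 0x1 bit set, so a zero
  // result means a sequential or external string.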
  testb(FieldOperand(scratch, Map::kInstanceTypeOffset),
        Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, not_data_object, not_data_object_distance);
  bind(&is_data_object);
}


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
  movq(bitmap_reg, addr_reg);
  // Sign extended 32 bit immediate.
  and_(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  movq(rcx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shrl(rcx, Immediate(shift));
  and_(rcx,
       Immediate((Page::kPageAlignmentMask >> shift) &
                 ~(Bitmap::kBytesPerCell - 1)));
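  // rcx now holds the byte offset of the object's mark-bit cell within the
  // bitmap: conceptually, shift the page offset right by kPointerSizeLog2
  // (one mark bit per pointer-sized word) and by kBitsPerCellLog2 (bits per
  // cell), then left by kBytesPerCellLog2 (bytes per cell). The three shifts
  // are folded into one, with the mask keeping the result in-page and
  // cell-aligned.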

  addq(bitmap_reg, rcx);
  movq(rcx, addr_reg);
  shrl(rcx, Immediate(kPointerSizeLog2));
  and_(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
  movl(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}


void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
  j(not_zero, &done, Label::kNear);

  if (FLAG_debug_code) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl. May overflow making the check conservative.
    addq(mask_scratch, mask_scratch);
    testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white. We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = rcx;  // Holds map while checking type.
  Register length = rcx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  movq(map, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(not_equal, &not_heap_number, Label::kNear);
  movq(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = rcx;
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  testb(instance_type, Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  testb(instance_type, Immediate(kExternalStringTag));
  j(zero, &not_external, Label::kNear);
  movq(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kAsciiStringTag == 0x04);
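  // The and_/xor_/addq sequence below maps the encoding bit to a scaled
  // character size: ASCII (encoding bit set, 0x04) becomes 4 and two-byte
  // becomes 8, i.e. char size << 2.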
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  addq(length, Immediate(0x04));
  // Value now either 4 (if ASCII) or 8 (if UC16), i.e. char-size shifted by 2.
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, Immediate(2 + kSmiTagSize + kSmiShiftSize));
  addq(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));
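  // Worked example: on x64 a smi stores its value in the upper 32 bits, so a
  // two-byte string of length 3 has a length field of 3 << 32; then
  // 8 * (3 << 32) >> 34 == 6 character bytes, to which the header size is
  // added before rounding up to pointer alignment.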

  bind(&is_data_object);
  // Value is a data object, and it is white. Mark it black. Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);

  bind(&done);
}


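// Walks the prototype chain from the object in rax, jumping to call_runtime
// if any object has elements, if any map's instance descriptors lack a valid
// enum cache, or if any object other than the receiver has a non-empty enum
// cache.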
4384void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
4385 Label next;
4386 Register empty_fixed_array_value = r8;
4387 LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
4388 Register empty_descriptor_array_value = r9;
4389 LoadRoot(empty_descriptor_array_value,
4390 Heap::kEmptyDescriptorArrayRootIndex);
4391 movq(rcx, rax);
4392 bind(&next);
4393
4394 // Check that there are no elements. Register rcx contains the
4395 // current JS object we've reached through the prototype chain.
4396 cmpq(empty_fixed_array_value,
4397 FieldOperand(rcx, JSObject::kElementsOffset));
4398 j(not_equal, call_runtime);
4399
4400 // Check that instance descriptors are not empty so that we can
4401 // check for an enum cache. Leave the map in rbx for the subsequent
4402 // prototype load.
4403 movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
4404 movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBitField3Offset));
4405 JumpIfSmi(rdx, call_runtime);
4406
4407 // Check that there is an enum cache in the non-empty instance
4408 // descriptors (rdx). This is the case if the next enumeration
4409 // index field does not contain a smi.
4410 movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
4411 JumpIfSmi(rdx, call_runtime);
4412
4413 // For all objects but the receiver, check that the cache is empty.
4414 Label check_prototype;
4415 cmpq(rcx, rax);
4416 j(equal, &check_prototype, Label::kNear);
4417 movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
4418 cmpq(rdx, empty_fixed_array_value);
4419 j(not_equal, call_runtime);
4420
4421 // Load the prototype from the map and loop if non-null.
4422 bind(&check_prototype);
4423 movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
4424 cmpq(rcx, null_value);
4425 j(not_equal, &next);
4426}
4427
4428
Steve Blocka7e24c12009-10-30 11:49:00 +00004429} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01004430
4431#endif // V8_TARGET_ARCH_X64