// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      has_frame_(false),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate->heap()->roots_array_start());
  intptr_t delta = other.address() - roots_register_value;
  return delta;
}
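
// Illustrative arithmetic (made-up addresses): if the roots array starts at
// 0x10000000, kRootRegister holds 0x10000000 + kRootRegisterBias, and an
// external reference at 0x10001000 gives delta = 0x1000 - kRootRegisterBias.
// Whenever such a delta fits in 32 bits, the helpers below can address the
// reference as Operand(kRootRegister, delta) instead of materializing a
// 64-bit immediate into a scratch register first.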


Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(target, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  movq(scratch, target);
  return Operand(scratch, 0);
}


void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  } else {
    movq(kScratchRegister, source);
    movq(destination, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(destination, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  } else {
    movq(kScratchRegister, destination);
    movq(Operand(kScratchRegister, 0), source);
  }
}


void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  movq(destination, source);
}


int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      // Operand is lea(scratch, Operand(kRootRegister, delta));
      // Opcodes : REX.W 8D ModRM Disp8/Disp32  - 4 or 7.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movq(destination, src);
  return 10;
}
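
// Size accounting example (byte counts as per the comments above): a delta
// that fits in a signed byte encodes as REX.W 8D ModRM disp8, 4 bytes; a
// 32-bit delta needs a disp32, 7 bytes; the fallback movq of a full 64-bit
// address is 10 bytes. Callers use this to pre-compute the size of the code
// that LoadAddress will emit before actually emitting it.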


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  ASSERT(root_array_available_);
  movq(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  cmpq(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  ASSERT(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register addr,
                                         Register scratch,
                                         SaveFPRegsMode save_fp,
                                         RememberedSetFinalAction and_then) {
  if (FLAG_debug_code) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  LoadRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Store pointer to buffer.
  movq(Operand(scratch, 0), addr);
  // Increment buffer top.
  addq(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  StoreRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Call stub on end of buffer.
  Label done;
  // Check for end of buffer.
  testq(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
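
// In effect the sequence above performs (C-style sketch, with "top" kept in
// the kStoreBufferTopRootIndex root slot):
//   *top++ = addr;                                   // append slot address
//   if ((uintptr_t)top & kStoreBufferOverflowBit) {  // buffer page full?
//     StoreBufferOverflowStub(save_fp);              // runtime drains buffer
//   }
// with kReturnAtEnd additionally folding the caller's ret(0) into both paths.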


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance distance) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask(isolate()));
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask(isolate()));
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start(isolate()));
    cmpq(scratch, kScratchRegister);
    j(cc, branch, distance);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
    j(cc, branch, distance);
  }
}
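
// Worked example for the non-serializer path (made-up numbers): suppose new
// space is a size-aligned 2 MB region at 0x40000000, so NewSpaceMask() is
// ~(2 MB - 1) over the address width. Then (object - 0x40000000) masked with
// NewSpaceMask() is zero exactly for addresses inside the region, and the
// flags left by and_ drive the conditional jump (zero means "in new space").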


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!value.is(rsi) && !dst.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(dst, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Array access: calculate the destination address. Index is not a smi.
  Register dst = index;
  lea(dst, Operand(object, index, times_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!value.is(rsi) && !address.is(rsi));

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  if (emit_debug_code()) {
    AbortIfSmi(object);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (FLAG_debug_code) {
    Label ok;
    cmpq(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}
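
// Typical call (sketch; the enum values are the counterparts of the ones this
// function tests): after storing rax into a field whose slot address was
// computed into rcx,
//   RecordWrite(rbx, rcx, rax, kDontSaveFPRegs,
//               EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
// The two CheckPageFlag tests let the common case skip the stub entirely; it
// only runs when the value's page and the object's page are both marked
// interesting for the incremental marker.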


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok, Label::kNear);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}
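
// Worked example of why this check exists: in JavaScript, -1 * 0 must
// produce -0.0, but an integer multiply yields plain 0 and loses the sign.
// So when the integer result is zero and an operand is negative, the caller
// jumps to then_label and typically redoes the operation in floating point.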


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);

  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // Control will not return here.
  int3();
}
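
// Encoding example (made-up address): with kSmiTag == 0 and kSmiTagMask == 1,
// msg == 0x100001 gives p0 == 0x100000; the low tag bit is cleared, so the
// value now carries a valid smi tag, and the second pushed argument is
// Smi::FromInt(1), the alignment difference. The runtime side can recover
// the original char* as p0 + (p1 - p0).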


void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later. However as the new key is the numeric value of a string key
  // there is no difference in using either key.
  Integer32ToSmi(index, hash);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(rax, function->nargs);
  LoadAddress(rbx, ExternalReference(function, isolate()));
  CEntryStub ces(1, kSaveFPRegs);
  CallStub(&ces);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that it fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#ifdef _WIN64
  // We need to prepare a slot for the result handle on the stack and put
  // a pointer to it into the 1st arg register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}


void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
                                              int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Factory* factory = isolate()->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax, reinterpret_cast<int64_t>(function_address),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#ifdef _WIN64
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero. Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  bind(&empty_result);
  // It was zero; the result is undefined.
  Move(rax, factory->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
#ifdef _WIN64
  LoadAddress(rcx, ExternalReference::isolate_address());
#else
  LoadAddress(rdi, ExternalReference::isolate_address());
#endif
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);
}
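
// The handle-scope bookkeeping above corresponds to this C-style sketch
// (illustrative only; next, limit and level live at fixed offsets from r15):
//   prev_next = next;  prev_limit = limit;  level++;
//   result = api_function();            // may allocate further handles
//   level--;  next = prev_next;
//   if (limit != prev_limit) {          // the scope grew an extension block
//     limit = prev_limit;               // restore, then free the extensions
//     DeleteHandleScopeExtensions(isolate);
//   }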


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}


static const Register saved_regs[] =
    { rax, rcx, rdx, rbx, rbp, rsi, rdi, r8, r9, r10, r11 };
static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);


void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1,
                                     Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      push(reg);
    }
  }
  // R12 to r15 are callee save on all platforms.
  if (fp_mode == kSaveFPRegs) {
    CpuFeatures::Scope scope(SSE2);
    subq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(rsp, i * kDoubleSize), reg);
    }
  }
}


void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
                                    Register exclusion1,
                                    Register exclusion2,
                                    Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    CpuFeatures::Scope scope(SSE2);
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(rsp, i * kDoubleSize));
    }
    addq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
  }
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pop(reg);
    }
  }
}
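
// These two are meant to bracket a call into C code, with the exclusion
// registers left unsaved so they can carry values across the call (sketch):
//   PushCallerSaved(kSaveFPRegs, rax);  // save caller-saved regs except rax
//   ...                                 // call out to C
//   PopCallerSaved(kSaveFPRegs, rax);   // rax still holds the call's result
// Pop must mirror Push exactly (same fp_mode and exclusions), or the stack
// pointer ends up out of sync.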


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    Set(kScratchRegister, x);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      Label ok;
      j(equal, &ok, Label::kNear);
      int3();
      bind(&ok);
    }
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}
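
// kSmiConstantRegister permanently holds Smi::FromInt(1), so small constants
// can be formed with a scaled lea instead of a 10-byte movq imm64. Worked
// example for 9: lea(dst, Operand(base, base, times_8, 0)) computes
// base + 8 * base = 9 * Smi::FromInt(1) = Smi::FromInt(9); negative values
// reuse the positive case and finish with neg(dst).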


void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}
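
// With the x64 smi layout assumed here (kSmiShift == 32, tag bits zero), the
// 32-bit payload lives in the upper word. Example: Integer32ToSmi maps 5 to
// 0x0000000500000000, and SmiToInteger64's arithmetic shift maps it back,
// preserving the sign of negative payloads.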


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok, Label::kNear);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  if (emit_debug_code()) {
    AbortIfNotSmi(smi1);
    AbortIfNotSmi(smi2);
  }
  cmpq(smi1, smi2);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  Cmp(dst, src);
}


void MacroAssembler::Cmp(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  ASSERT(!dst.AddressUsesRegister(smi_reg));
  cmpq(dst, smi_reg);
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}
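
// Untag-and-multiply folds into a single shift. Example with kSmiShift == 32:
// a smi holding v is the word v << 32, so v * 2^power equals
// (v << 32) >> (32 - power) for power < 32 and (v << 32) << (power - 32) for
// power > 32; power == 32 leaves the raw word unchanged.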


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label* on_not_smis,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
    ASSERT(!src1.is(kScratchRegister));
    ASSERT(!src2.is(kScratchRegister));
    movq(kScratchRegister, src1);
    or_(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    or_(dst, src2);
    JumpIfNotSmi(dst, on_not_smis, near_jump);
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckSmi(const Operand& src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}
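
// The rotate trick: rol by 1 moves the sign bit (bit 63) into bit 0 and the
// smi tag bit up into bit 1, so a single test against mask 3 checks
// "tag == 0 and sign == 0" at once. A negative smi fails on the rotated sign
// bit; a heap object pointer fails on its tag bit.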


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}


void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            Label* on_invalid,
                                            Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid,
                                                Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfSmi(Register src,
                               Label* on_smi,
                               Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(smi, on_smi, near_jump);
}


void MacroAssembler::JumpIfNotSmi(Register src,
                                  Label* on_not_smi,
                                  Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi, near_jump);
}


void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, Label* on_not_smi_or_negative,
    Label::Distance near_jump) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Register src2,
                                      Label* on_not_both_smi,
                                      Label::Distance near_jump) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Register src2,
                                                  Label* on_not_both_smi,
                                                  Label::Distance near_jump) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result,
                                       Label::Distance near_jump) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result, near_jump);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result, near_jump);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value give the same result; they
      // differ only in the overflow flag, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}
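
// Two's-complement note (illustrative): -Smi::kMinValue is not representable
// and negation wraps back to Smi::kMinValue itself, which is why the code
// above adds the min-value instead of negating it: x + kMinValue and
// x - kMinValue produce the same 64-bit bit pattern and differ only in the
// overflow flag, which this non-checking variant deliberately ignores.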


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativity before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result, near_jump);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result, near_jump);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativity before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result, near_jump);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value give the same result; they
      // differ only in the overflow flag, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result, near_jump);
    }
  }
}


void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            Label* on_smi_result,
                            Label::Distance near_jump) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result, near_jump);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result, near_jump);
  }
}
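
// Illustrative note: a 64-bit value equals its own negation only for 0 and
// for the wrap-around case where only the sign bit is set (the tagged
// Smi::kMinValue), so the cmpq against the original operand flags exactly
// the two inputs whose negation is not a representable smi.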


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    ASSERT(!src2.AddressUsesRegister(dst));
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
      movq(kScratchRegister, src1);
      addq(kScratchRegister, src2);
      Check(no_overflow, "Smi addition overflow");
    }
    lea(dst, Operand(src1, src2, times_1, 0));
  } else {
    addq(dst, src2);
    Assert(no_overflow, "Smi addition overflow");
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    cmpq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}
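
// Illustrative note: in the dst.is(src1) case above, cmpq performs the same
// subtraction as subq and sets the flags without writing a result, so
// overflow can be detected and the bailout taken before dst (which aliases
// src1) is clobbered.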


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result, near_jump);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure, Label::kNear);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result, Label::kNear);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    // Result was positive zero.
    j(positive, &zero_correct_result, Label::kNear);

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result, Label::kNear);
    // One of src1 and src2 is zero, so check whether the other is negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);
  }
}
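
// Illustrative notes: SmiToInteger64 untags src1, so the imul multiplies the
// untagged v1 by the still-tagged v2 << 32, producing the correctly tagged
// product (v1 * v2) << 32 directly. For the negative-zero check, a zero
// product means at least one operand was zero; xor-ing the original operands
// sets the sign flag exactly when the other operand is negative, i.e. when
// the mathematical result is -0, which a smi cannot represent.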


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  testq(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with the negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div, Label::kNear);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div, Label::kNear);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
  } else {
    j(negative, on_not_smi_result, near_jump);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result, Label::kNear);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result, near_jump);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}
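
// Illustrative note: testl(rax, Immediate(0x7fffffff)) is zero only when the
// untagged dividend has no bits set below bit 31, i.e. for 0 and for
// Smi::kMinValue (0x80000000). Those are exactly the dividends that need the
// follow-up divisor sign check: 0 divided by a negative number would be -0,
// and kMinValue / -1 would overflow idivl.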


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div, Label::kNear);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div, Label::kNear);
  // Retag inputs and go to the slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result, near_jump);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result, Label::kNear);
  testq(src1, src1);
  j(negative, on_not_smi_result, near_jump);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}
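
// Illustrative note: idivl leaves the remainder in rdx with the sign of the
// dividend, so a zero remainder from a negative dividend (e.g. -4 % 2 in JS)
// is mathematically -0. That value has no smi encoding, hence the extra sign
// test on src1 before the remainder is retagged.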


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}
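
// Illustrative walk-through: movl zero-extends, so kScratchRegister holds
// 0x00000000FFFFFFFF. Filling a smi's (all-zero) low 32 bits with ones first
// means the final not_ complements the value bits while returning the low
// bits to zero, yielding Smi::FromInt(~value). The lea path relies on the
// same fact: adding 0xFFFFFFFF to a value whose low 32 bits are zero cannot
// carry into the upper half.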


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    Set(dst, 0);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}
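
// Illustrative note: the bitwise operations above work directly on tagged
// values without untagging, because with kSmiTag == 0 and the payload in the
// upper 32 bits, (a << 32) op (b << 32) == (a op b) << 32 for and, or, xor.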


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}
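
// Worked example (illustrative, with kSmiShift == 32): for src holding
// Smi::FromInt(-8) == -8 << 32 and shift_value == 2, the sar by 34 produces
// the sign-extended value -2 and the shl by 32 retags it as Smi::FromInt(-2).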


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}
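
// Illustrative note: no overflow check is needed because JS left shifts
// truncate to int32 anyway; shifting the tagged value (v << 32) left by s
// discards exactly the payload bits that ToInt32(v << s) would discard,
// while the low 32 tag bits stay zero.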


void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result, near_jump);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  // Untag shift amount.
  SmiToInteger32(rcx, src2);
  // The shift amount uses only the lower 5 bits, not six as the 64-bit shl
  // opcode would.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  // dst and src1 can be the same, because the one case that bails out
  // is a shift by 0, which leaves dst, and therefore src1, unchanged.
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift amount is (original rcx & 0x1f) + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result, Label::kNear);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result, near_jump);
    bind(&positive_result);
  } else {
    // src2 was zero and src1 negative.
    j(negative, on_not_smi_result, near_jump);
  }
}
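
// Illustrative note: orl(rcx, Immediate(kSmiShift)) followed by shr_cl gives
// an effective shift of ((rcx | 32) & 63) == (rcx & 0x1f) + 32: the 5-bit JS
// shift amount plus the 32 needed to untag; the shl by kSmiShift retags the
// result. A zero JS shift of a negative smi is the one case whose unsigned
// result exceeds Smi::kMaxValue, and the sign test above catches it.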


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift amount is (original rcx & 0x1f) + 32.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // The operands must not both be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, both tag bits are set, i.e. neither operand is a smi.
  j(not_zero, on_not_smis, near_jump);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTagMask, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, the scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}


SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}
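
// Illustrative note: SmiToIndex turns a tagged smi v << 32 into v << shift
// with a single arithmetic shift, so callers can use the result directly as
// a times_1 index. E.g. with shift == kPointerSizeLog2 == 3, the sar by 29
// maps Smi::FromInt(v) to v * 8, the byte offset of element v in an array of
// pointers.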


SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
  ASSERT_EQ(0, kSmiShift % kBitsPerByte);
  addl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     Label* not_string,
                                     Label::Distance near_jump) {
  Condition is_smi = CheckSmi(object);
  j(is_smi, not_string, near_jump);
  CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
  j(above_equal, not_string, near_jump);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(
    Register first_object,
    Register second_object,
    Register scratch1,
    Register scratch2,
    Label* on_fail,
    Label::Distance near_jump) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
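
// Illustrative note: the lea computes scratch1 + 8 * scratch2, and because
// the masked type bits of the two strings cannot overlap once one copy is
// shifted by 3 (the ASSERT_EQ above guarantees this), the addition acts as a
// bitwise interleave. Both masked instance types are then checked against
// the expected flat-ASCII pattern with a single cmpl.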


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure, near_jump);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    Label* on_fail,
    Label::Distance near_jump) {
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ASCII strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(result, Operand(result, 0));
  } else {
    Move(result, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    movq(kScratchRegister, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(kScratchRegister, Operand(kScratchRegister, 0));
    push(kScratchRegister);
  } else {
    Push(object);
  }
}


void MacroAssembler::LoadGlobalCell(Register dst,
                                    Handle<JSGlobalPropertyCell> cell) {
  if (dst.is(rax)) {
    load_rax(cell.location(), RelocInfo::GLOBAL_PROPERTY_CELL);
  } else {
    movq(dst, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(dst, Operand(dst, 0));
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::TestBit(const Operand& src, int bits) {
  int byte_offset = bits / kBitsPerByte;
  int bit_in_byte = bits & (kBitsPerByte - 1);
  testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
}
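
// Worked example (illustrative): TestBit(src, 35) computes byte_offset ==
// 35 / 8 == 4 and bit_in_byte == 35 & 7 == 3, so it emits a one-byte testb
// of mask 0x08 against the fifth byte of the operand instead of loading the
// whole word.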


void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}


int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
  const int kCallInstructionSize = 3;
  return LoadAddressSize(ext) + kCallInstructionSize;
}


void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(destination, rmode);
#endif
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(pc_offset(), end_position);
#endif
}


void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          unsigned ast_id) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode, ast_id);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Pushad() {
  push(rax);
  push(rcx);
  push(rdx);
  push(rbx);
  // Not pushing rsp or rbp.
  push(rsi);
  push(rdi);
  push(r8);
  push(r9);
  // r10 is kScratchRegister.
  push(r11);
  // r12 is kSmiConstantRegister.
  // r13 is kRootRegister.
  push(r14);
  push(r15);
  STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, -sp_delta));
}


void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, sp_delta));
  pop(r15);
  pop(r14);
  pop(r11);
  pop(r9);
  pop(r8);
  pop(rdi);
  pop(rsi);
  pop(rbx);
  pop(rdx);
  pop(rcx);
  pop(rax);
}


void MacroAssembler::Dropad() {
  addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}


// Order in which general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,
    1,
    2,
    3,
    -1,
    -1,
    4,
    5,
    6,
    7,
    -1,
    8,
    -1,
    -1,
    9,
    10
};
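
// Illustrative reading of the table: it maps a register code to that
// register's index in the Pushad layout, with -1 for registers that are
// never saved (rsp, rbp, kScratchRegister r10, kSmiConstantRegister r12 and
// kRootRegister r13). For example, code 1 (rcx) maps to slot 1, while code
// 11 (r11) maps to slot 8.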


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movq(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movq(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First compute the state and push the frame pointer and context.
  unsigned state = StackHandler::OffsetField::encode(handler_index);
  if (try_location == IN_JAVASCRIPT) {
    push(rbp);
    push(rsi);
    state |= (type == TRY_CATCH_HANDLER)
        ? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
        : StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL for
    // rbp. We expect the code throwing an exception to check rbp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    Push(Smi::FromInt(0));  // No context.
    state |= StackHandler::KindField::encode(StackHandler::ENTRY);
  }

  // Push the state and the code object.
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(ExternalOperand(handler_address));
  // Set this new handler as the current one.
  movq(ExternalOperand(handler_address), rsp);
}
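
// Illustrative sketch of the handler frame this builds, with offsets
// matching the STATIC_ASSERTs above (rsp points at the last value pushed):
//   rsp + 0 * kPointerSize : next handler (kNextOffset)
//   rsp + 1 * kPointerSize : code object (kCodeOffset)
//   rsp + 2 * kPointerSize : state word, kind + handler index (kStateOffset)
//   rsp + 3 * kPointerSize : context (kContextOffset)
//   rsp + 4 * kPointerSize : frame pointer (kFPOffset)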


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(ExternalOperand(handler_address));
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // rax = exception, rdi = code object, rdx = state.
  movq(rbx, FieldOperand(rdi, Code::kHandlerTableOffset));
  shr(rdx, Immediate(StackHandler::kKindWidth));
  movq(rdx, FieldOperand(rbx, rdx, times_8, FixedArray::kHeaderSize));
  SmiToInteger64(rdx, rdx);
  lea(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
  jmp(rdi);
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in rax.
  if (!value.is(rax)) {
    movq(rax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  movq(rsp, ExternalOperand(handler_address));
  // Restore the next handler.
  pop(ExternalOperand(handler_address));

  // Remove the code object and state, compute the handler address in rdi.
  pop(rdi);  // Code object.
  pop(rdx);  // Offset and state.

  // Restore the context and frame pointer.
  pop(rsi);  // Context.
  pop(rbp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either
  // rbp or rsi.
  Label skip;
  testq(rsi, rsi);
  j(zero, &skip, Label::kNear);
  movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  bind(&skip);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
                                      Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in rax.
  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
                                      isolate());
    Set(rax, static_cast<int64_t>(false));
    Store(external_caught, rax);

    // Set pending exception and rax to out of memory exception.
    ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                        isolate());
    movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
    Store(pending_exception, rax);
  } else if (!value.is(rax)) {
    movq(rax, value);
  }

  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Load(rsp, handler_address);

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  movq(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::ENTRY == 0);
  testl(Operand(rsp, StackHandlerConstants::kStateOffset),
        Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(ExternalOperand(handler_address));

  // Remove the code object and state, compute the handler address in rdi.
  pop(rdi);  // Code object.
  pop(rdx);  // Offset and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(rsi);
  pop(rbp);

  JumpToHandlerEntry();
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    addq(rsp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  STATIC_ASSERT(FAST_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastElementValue));
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  STATIC_ASSERT(FAST_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastSmiOnlyElementValue));
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastElementValue));
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiOnlyElements(Register map,
                                              Label* fail,
                                              Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastSmiOnlyElementValue));
  j(above, fail, distance);
}


void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register index,
    XMMRegister xmm_scratch,
    Label* fail) {
  Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done;

  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmpl(FieldOperand(maybe_number, offset),
       Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
        xmm_scratch);
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmpl(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  // Convert all NaNs to the same canonical NaN value when they are stored in
  // the double array.
  Set(kScratchRegister, BitCast<uint64_t>(
      FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
  movq(xmm_scratch, kScratchRegister);
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  SmiToInteger32(kScratchRegister, maybe_number);
  cvtlsi2sd(xmm_scratch, kScratchRegister);
  movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
        xmm_scratch);
  bind(&done);
}
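
// Illustrative note: the upper 32 bits of a double hold its sign, exponent,
// and the top of the fraction; comparing them against
// kNaNOrInfinityLowerBoundUpper32 detects values whose exponent bits are all
// set, i.e. NaN or Infinity. A non-zero fraction then identifies NaN, which
// is rewritten to one canonical bit pattern so it can never collide with the
// hole sentinel used by FixedDoubleArray.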


void MacroAssembler::CompareMap(Register obj,
                                Handle<Map> map,
                                Label* early_success,
                                CompareMapMode mode) {
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
    Map* transitioned_fast_element_map(
        map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
    ASSERT(transitioned_fast_element_map == NULL ||
           map->elements_kind() != FAST_ELEMENTS);
    if (transitioned_fast_element_map != NULL) {
      j(equal, early_success, Label::kNear);
      Cmp(FieldOperand(obj, HeapObject::kMapOffset),
          Handle<Map>(transitioned_fast_element_map));
    }

    Map* transitioned_double_map(
        map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
    ASSERT(transitioned_double_map == NULL ||
           map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
    if (transitioned_double_map != NULL) {
      j(equal, early_success, Label::kNear);
      Cmp(FieldOperand(obj, HeapObject::kMapOffset),
          Handle<Map>(transitioned_double_map));
    }
  }
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type,
                              CompareMapMode mode) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, map, &success, mode);
  j(not_equal, fail);
  bind(&success);
}


void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  testl(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  decb(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}


void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister temp_xmm_reg,
                                        Register result_reg,
                                        Register temp_reg) {
  Label done;
  Set(result_reg, 0);
  xorps(temp_xmm_reg, temp_xmm_reg);
  ucomisd(input_reg, temp_xmm_reg);
  j(below, &done, Label::kNear);
  uint64_t one_half = BitCast<uint64_t, double>(0.5);
  Set(temp_reg, one_half);
  movq(temp_xmm_reg, temp_reg);
  addsd(temp_xmm_reg, input_reg);
  cvttsd2si(result_reg, temp_xmm_reg);
  testl(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  Set(result_reg, 255);
  bind(&done);
}
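
// Worked example (illustrative): ClampUint8 leaves 0..255 untouched since
// testl against 0xFFFFFF00 is zero for them. For 300, setcc(negative)
// writes 0 and decb wraps it to 255; for -5 it writes 1 and decb yields 0.
// In ClampDoubleToUint8, adding 0.5 before the truncating cvttsd2si rounds
// to the nearest integer, e.g. 127.6 + 0.5 == 128.1 truncates to 128.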


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movq(descriptors, FieldOperand(map,
                                 Map::kInstanceDescriptorsOrBitField3Offset));
  Label not_smi;
  JumpIfNotSmi(descriptors, &not_smi, Label::kNear);
  Move(descriptors, isolate()->factory()->empty_descriptor_array());
  bind(&not_smi);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(equal, success, RelocInfo::CODE_TARGET);

  bind(&fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok, Label::kNear);
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(NegateCondition(is_smi), "Operand is a smi");
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotSmi(const Operand& object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}
2871
2872
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002873void MacroAssembler::AbortIfNotString(Register object) {
2874 testb(object, Immediate(kSmiTagMask));
2875 Assert(not_equal, "Operand is not a string");
2876 push(object);
2877 movq(object, FieldOperand(object, HeapObject::kMapOffset));
2878 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
2879 pop(object);
2880 Assert(below, "Operand is not a string");
2881}
2882
2883
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002884void MacroAssembler::AbortIfNotRootValue(Register src,
2885 Heap::RootListIndex root_value_index,
2886 const char* message) {
2887 ASSERT(!src.is(kScratchRegister));
2888 LoadRoot(kScratchRegister, root_value_index);
2889 cmpq(src, kScratchRegister);
2890 Check(equal, message);
2891}
2892
2893
2894
Leon Clarked91b9f72010-01-27 17:25:45 +00002895Condition MacroAssembler::IsObjectStringType(Register heap_object,
2896 Register map,
2897 Register instance_type) {
2898 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00002899 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002900 STATIC_ASSERT(kNotStringTag != 0);
Leon Clarked91b9f72010-01-27 17:25:45 +00002901 testb(instance_type, Immediate(kIsNotStringMask));
2902 return zero;
2903}
2904
2905
Steve Blocka7e24c12009-10-30 11:49:00 +00002906void MacroAssembler::TryGetFunctionPrototype(Register function,
2907 Register result,
Ben Murdoch592a9fc2012-03-05 11:04:45 +00002908 Label* miss,
2909 bool miss_on_bound_function) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002910 // Check that the receiver isn't a smi.
2911 testl(function, Immediate(kSmiTagMask));
2912 j(zero, miss);
2913
2914 // Check that the function really is a function.
2915 CmpObjectType(function, JS_FUNCTION_TYPE, result);
2916 j(not_equal, miss);
2917
Ben Murdoch592a9fc2012-03-05 11:04:45 +00002918 if (miss_on_bound_function) {
2919 movq(kScratchRegister,
2920 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2921 // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte
2922 // field).
2923 TestBit(FieldOperand(kScratchRegister,
2924 SharedFunctionInfo::kCompilerHintsOffset),
2925 SharedFunctionInfo::kBoundFunction);
2926 j(not_zero, miss);
2927 }
2928
Steve Blocka7e24c12009-10-30 11:49:00 +00002929 // Make sure that the function has an instance prototype.
Ben Murdoch257744e2011-11-30 15:57:28 +00002930 Label non_instance;
Steve Blocka7e24c12009-10-30 11:49:00 +00002931 testb(FieldOperand(result, Map::kBitFieldOffset),
2932 Immediate(1 << Map::kHasNonInstancePrototype));
Ben Murdoch257744e2011-11-30 15:57:28 +00002933 j(not_zero, &non_instance, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002934
2935 // Get the prototype or initial map from the function.
2936 movq(result,
2937 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2938
2939 // If the prototype or initial map is the hole, don't return it and
2940 // simply miss the cache instead. This will allow us to allocate a
2941 // prototype object on-demand in the runtime system.
2942 CompareRoot(result, Heap::kTheHoleValueRootIndex);
2943 j(equal, miss);
2944
2945 // If the function does not have an initial map, we're done.
Ben Murdoch257744e2011-11-30 15:57:28 +00002946 Label done;
Steve Blocka7e24c12009-10-30 11:49:00 +00002947 CmpObjectType(result, MAP_TYPE, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002948 j(not_equal, &done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002949
2950 // Get the prototype from the initial map.
2951 movq(result, FieldOperand(result, Map::kPrototypeOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002952 jmp(&done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002953
2954 // Non-instance prototype: Fetch prototype from constructor field
2955 // in initial map.
2956 bind(&non_instance);
2957 movq(result, FieldOperand(result, Map::kConstructorOffset));
2958
2959 // All done.
2960 bind(&done);
2961}
2962
2963
2964void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2965 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002966 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Ben Murdoch8b112d22011-06-08 16:22:53 +01002967 movl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002968 }
2969}
2970
2971
2972void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2973 ASSERT(value > 0);
2974 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002975 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00002976 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01002977 incl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002978 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002979 addl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002980 }
2981 }
2982}
2983
2984
2985void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2986 ASSERT(value > 0);
2987 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002988 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00002989 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01002990 decl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002991 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002992 subl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002993 }
2994 }
2995}
2996
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002997
Steve Blocka7e24c12009-10-30 11:49:00 +00002998#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +00002999void MacroAssembler::DebugBreak() {
Steve Block9fac8402011-05-12 15:51:54 +01003000 Set(rax, 0); // No arguments.
Steve Block44f0eee2011-05-26 01:26:41 +01003001 LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
Andrei Popescu402d9372010-02-26 13:31:12 +00003002 CEntryStub ces(1);
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003003 ASSERT(AllowThisStubCall(&ces));
Andrei Popescu402d9372010-02-26 13:31:12 +00003004 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Steve Blocka7e24c12009-10-30 11:49:00 +00003005}
Andrei Popescu402d9372010-02-26 13:31:12 +00003006#endif // ENABLE_DEBUGGER_SUPPORT
Steve Blocka7e24c12009-10-30 11:49:00 +00003007
3008
Ben Murdoch257744e2011-11-30 15:57:28 +00003009void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
3010 // This macro takes the dst register to make the code more readable
3011 // at the call sites. However, the dst register has to be rcx to
3012 // follow the calling convention which requires the call kind to be
3013 // in rcx.
3014 ASSERT(dst.is(rcx));
3015 if (call_kind == CALL_AS_FUNCTION) {
3016 LoadSmiConstant(dst, Smi::FromInt(1));
3017 } else {
3018 LoadSmiConstant(dst, Smi::FromInt(0));
3019 }
3020}
3021
3022
Steve Blocka7e24c12009-10-30 11:49:00 +00003023void MacroAssembler::InvokeCode(Register code,
3024 const ParameterCount& expected,
3025 const ParameterCount& actual,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003026 InvokeFlag flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003027 const CallWrapper& call_wrapper,
3028 CallKind call_kind) {
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003029 // You can't call a function without a valid frame.
3030 ASSERT(flag == JUMP_FUNCTION || has_frame());
3031
Ben Murdoch257744e2011-11-30 15:57:28 +00003032 Label done;
Ben Murdochc7cc0282012-03-05 14:35:55 +00003033 bool definitely_mismatches = false;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003034 InvokePrologue(expected,
3035 actual,
3036 Handle<Code>::null(),
3037 code,
3038 &done,
Ben Murdochc7cc0282012-03-05 14:35:55 +00003039 &definitely_mismatches,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003040 flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003041 Label::kNear,
3042 call_wrapper,
3043 call_kind);
Ben Murdochc7cc0282012-03-05 14:35:55 +00003044 if (!definitely_mismatches) {
3045 if (flag == CALL_FUNCTION) {
3046 call_wrapper.BeforeCall(CallSize(code));
3047 SetCallKind(rcx, call_kind);
3048 call(code);
3049 call_wrapper.AfterCall();
3050 } else {
3051 ASSERT(flag == JUMP_FUNCTION);
3052 SetCallKind(rcx, call_kind);
3053 jmp(code);
3054 }
3055 bind(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00003056 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003057}
3058
3059
3060void MacroAssembler::InvokeCode(Handle<Code> code,
3061 const ParameterCount& expected,
3062 const ParameterCount& actual,
3063 RelocInfo::Mode rmode,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003064 InvokeFlag flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003065 const CallWrapper& call_wrapper,
3066 CallKind call_kind) {
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003067 // You can't call a function without a valid frame.
3068 ASSERT(flag == JUMP_FUNCTION || has_frame());
3069
Ben Murdoch257744e2011-11-30 15:57:28 +00003070 Label done;
Ben Murdochc7cc0282012-03-05 14:35:55 +00003071 bool definitely_mismatches = false;
Steve Blocka7e24c12009-10-30 11:49:00 +00003072 Register dummy = rax;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003073 InvokePrologue(expected,
3074 actual,
3075 code,
3076 dummy,
3077 &done,
Ben Murdochc7cc0282012-03-05 14:35:55 +00003078 &definitely_mismatches,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003079 flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003080 Label::kNear,
3081 call_wrapper,
3082 call_kind);
Ben Murdochc7cc0282012-03-05 14:35:55 +00003083 if (!definitely_mismatches) {
3084 if (flag == CALL_FUNCTION) {
3085 call_wrapper.BeforeCall(CallSize(code));
3086 SetCallKind(rcx, call_kind);
3087 Call(code, rmode);
3088 call_wrapper.AfterCall();
3089 } else {
3090 ASSERT(flag == JUMP_FUNCTION);
3091 SetCallKind(rcx, call_kind);
3092 Jump(code, rmode);
3093 }
3094 bind(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00003095 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003096}
3097
3098
3099void MacroAssembler::InvokeFunction(Register function,
3100 const ParameterCount& actual,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003101 InvokeFlag flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003102 const CallWrapper& call_wrapper,
3103 CallKind call_kind) {
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003104 // You can't call a function without a valid frame.
3105 ASSERT(flag == JUMP_FUNCTION || has_frame());
3106
Steve Blocka7e24c12009-10-30 11:49:00 +00003107 ASSERT(function.is(rdi));
3108 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
3109 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
3110 movsxlq(rbx,
3111 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003112 // Advances rdx to the end of the Code object header, to the start of
3113 // the executable code.
Steve Block791712a2010-08-27 10:21:07 +01003114 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003115
3116 ParameterCount expected(rbx);
Ben Murdoch257744e2011-11-30 15:57:28 +00003117 InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
Steve Blocka7e24c12009-10-30 11:49:00 +00003118}
3119
3120
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003121void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
Andrei Popescu402d9372010-02-26 13:31:12 +00003122 const ParameterCount& actual,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003123 InvokeFlag flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003124 const CallWrapper& call_wrapper,
3125 CallKind call_kind) {
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003126 // You can't call a function without a valid frame.
3127 ASSERT(flag == JUMP_FUNCTION || has_frame());
3128
Andrei Popescu402d9372010-02-26 13:31:12 +00003129 // Get the function and set up the context.
Ben Murdochc7cc0282012-03-05 14:35:55 +00003130 LoadHeapObject(rdi, function);
Andrei Popescu402d9372010-02-26 13:31:12 +00003131 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
3132
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003133 // We call indirectly through the code field in the function to
3134 // allow recompilation to take effect without changing any of the
3135 // call sites.
3136 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
3137 ParameterCount expected(function->shared()->formal_parameter_count());
3138 InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
Ben Murdoch257744e2011-11-30 15:57:28 +00003139}
3140
3141
3142void MacroAssembler::InvokePrologue(const ParameterCount& expected,
3143 const ParameterCount& actual,
3144 Handle<Code> code_constant,
3145 Register code_register,
3146 Label* done,
Ben Murdochc7cc0282012-03-05 14:35:55 +00003147 bool* definitely_mismatches,
Ben Murdoch257744e2011-11-30 15:57:28 +00003148 InvokeFlag flag,
3149 Label::Distance near_jump,
3150 const CallWrapper& call_wrapper,
3151 CallKind call_kind) {
3152 bool definitely_matches = false;
Ben Murdochc7cc0282012-03-05 14:35:55 +00003153 *definitely_mismatches = false;
Ben Murdoch257744e2011-11-30 15:57:28 +00003154 Label invoke;
3155 if (expected.is_immediate()) {
3156 ASSERT(actual.is_immediate());
3157 if (expected.immediate() == actual.immediate()) {
3158 definitely_matches = true;
3159 } else {
3160 Set(rax, actual.immediate());
3161 if (expected.immediate() ==
3162 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
3163 // Don't worry about adapting arguments for built-ins that
3164 // don't want that done. Skip adaption code by making it look
3165 // like we have a match between expected and actual number of
3166 // arguments.
3167 definitely_matches = true;
3168 } else {
Ben Murdochc7cc0282012-03-05 14:35:55 +00003169 *definitely_mismatches = true;
Ben Murdoch257744e2011-11-30 15:57:28 +00003170 Set(rbx, expected.immediate());
3171 }
3172 }
3173 } else {
3174 if (actual.is_immediate()) {
3175 // Expected is in register, actual is immediate. This is the
3176 // case when we invoke function values without going through the
3177 // IC mechanism.
3178 cmpq(expected.reg(), Immediate(actual.immediate()));
3179 j(equal, &invoke, Label::kNear);
3180 ASSERT(expected.reg().is(rbx));
3181 Set(rax, actual.immediate());
3182 } else if (!expected.reg().is(actual.reg())) {
3183 // Both expected and actual are in (different) registers. This
3184 // is the case when we invoke functions using call and apply.
3185 cmpq(expected.reg(), actual.reg());
3186 j(equal, &invoke, Label::kNear);
3187 ASSERT(actual.reg().is(rax));
3188 ASSERT(expected.reg().is(rbx));
3189 }
3190 }
3191
3192 if (!definitely_matches) {
3193 Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
3194 if (!code_constant.is_null()) {
3195 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
3196 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
3197 } else if (!code_register.is(rdx)) {
3198 movq(rdx, code_register);
3199 }
3200
3201 if (flag == CALL_FUNCTION) {
3202 call_wrapper.BeforeCall(CallSize(adaptor));
3203 SetCallKind(rcx, call_kind);
3204 Call(adaptor, RelocInfo::CODE_TARGET);
3205 call_wrapper.AfterCall();
Ben Murdochc7cc0282012-03-05 14:35:55 +00003206 if (!*definitely_mismatches) {
3207 jmp(done, near_jump);
3208 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003209 } else {
3210 SetCallKind(rcx, call_kind);
3211 Jump(adaptor, RelocInfo::CODE_TARGET);
3212 }
3213 bind(&invoke);
Steve Block1e0659c2011-05-24 12:43:12 +01003214 }
Andrei Popescu402d9372010-02-26 13:31:12 +00003215}
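// Decision structure of InvokePrologue in scalar form, for the case where
// both argument counts are immediates. A hedged sketch, not V8 code:
// kSentinel stands for SharedFunctionInfo::kDontAdaptArgumentsSentinel, the
// names are hypothetical, and "adaptor" means the ArgumentsAdaptorTrampoline
// invoked above on an argument-count mismatch.
enum InvokePath { kDirect, kThroughAdaptor };

InvokePath ChooseInvokePath(int expected, int actual, int kSentinel) {
  // A definite match, or a callee that opts out of argument adaptation,
  // skips the adaptor entirely (definitely_matches above).
  if (expected == actual || expected == kSentinel) return kDirect;
  // Otherwise the adaptor trampoline fixes up the frame; a compile-time
  // mismatch (*definitely_mismatches) additionally suppresses emission of
  // the direct call sequence at the call site.
  return kThroughAdaptor;
}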
3216
3217
Steve Blocka7e24c12009-10-30 11:49:00 +00003218void MacroAssembler::EnterFrame(StackFrame::Type type) {
3219 push(rbp);
3220 movq(rbp, rsp);
3221 push(rsi); // Context.
Steve Block3ce2e202009-11-05 08:53:23 +00003222 Push(Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00003223 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
3224 push(kScratchRegister);
Steve Block44f0eee2011-05-26 01:26:41 +01003225 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003226 movq(kScratchRegister,
Ben Murdoch257744e2011-11-30 15:57:28 +00003227 isolate()->factory()->undefined_value(),
Steve Blocka7e24c12009-10-30 11:49:00 +00003228 RelocInfo::EMBEDDED_OBJECT);
3229 cmpq(Operand(rsp, 0), kScratchRegister);
3230 Check(not_equal, "code object not properly patched");
3231 }
3232}
3233
3234
3235void MacroAssembler::LeaveFrame(StackFrame::Type type) {
Steve Block44f0eee2011-05-26 01:26:41 +01003236 if (emit_debug_code()) {
Steve Block3ce2e202009-11-05 08:53:23 +00003237 Move(kScratchRegister, Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00003238 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
3239 Check(equal, "stack frame types must match");
3240 }
3241 movq(rsp, rbp);
3242 pop(rbp);
3243}
3244
3245
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003246void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
Ben Murdochc7cc0282012-03-05 14:35:55 +00003247 // Set up the frame structure on the stack.
Steve Blocka7e24c12009-10-30 11:49:00 +00003248 // All constants are relative to the frame pointer of the exit frame.
3249 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
3250 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
3251 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
3252 push(rbp);
3253 movq(rbp, rsp);
3254
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003255 // Reserve room for entry stack pointer and push the code object.
Steve Block3ce2e202009-11-05 08:53:23 +00003256 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +00003257 push(Immediate(0)); // Saved entry sp, patched before call.
3258 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
3259 push(kScratchRegister); // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +00003260
3261 // Save the frame pointer and the context in top.
Ben Murdochbb769b22010-08-11 14:56:33 +01003262 if (save_rax) {
Steve Block44f0eee2011-05-26 01:26:41 +01003263 movq(r14, rax); // Backup rax in callee-save register.
Ben Murdochbb769b22010-08-11 14:56:33 +01003264 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003265
Ben Murdoch589d6972011-11-30 16:04:58 +00003266 Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
3267 Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
Ben Murdochbb769b22010-08-11 14:56:33 +01003268}
Steve Blocka7e24c12009-10-30 11:49:00 +00003269
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003270
Steve Block1e0659c2011-05-24 12:43:12 +01003271void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
3272 bool save_doubles) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003273#ifdef _WIN64
Steve Block1e0659c2011-05-24 12:43:12 +01003274 const int kShadowSpace = 4;
3275 arg_stack_space += kShadowSpace;
Steve Blocka7e24c12009-10-30 11:49:00 +00003276#endif
Steve Block1e0659c2011-05-24 12:43:12 +01003277 // Optionally save all XMM registers.
3278 if (save_doubles) {
Steve Block1e0659c2011-05-24 12:43:12 +01003279 int space = XMMRegister::kNumRegisters * kDoubleSize +
3280 arg_stack_space * kPointerSize;
3281 subq(rsp, Immediate(space));
3282 int offset = -2 * kPointerSize;
3283 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
3284 XMMRegister reg = XMMRegister::FromAllocationIndex(i);
3285 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
3286 }
3287 } else if (arg_stack_space > 0) {
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003288 subq(rsp, Immediate(arg_stack_space * kPointerSize));
3289 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003290
3291 // Get the required frame alignment for the OS.
Steve Block44f0eee2011-05-26 01:26:41 +01003292 const int kFrameAlignment = OS::ActivationFrameAlignment();
Steve Blocka7e24c12009-10-30 11:49:00 +00003293 if (kFrameAlignment > 0) {
3294 ASSERT(IsPowerOf2(kFrameAlignment));
Ben Murdoch8b112d22011-06-08 16:22:53 +01003295 ASSERT(is_int8(kFrameAlignment));
3296 and_(rsp, Immediate(-kFrameAlignment));
Steve Blocka7e24c12009-10-30 11:49:00 +00003297 }
3298
3299 // Patch the saved entry sp.
3300 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
3301}
3302
3303
Steve Block1e0659c2011-05-24 12:43:12 +01003304void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003305 EnterExitFramePrologue(true);
Ben Murdochbb769b22010-08-11 14:56:33 +01003306
Ben Murdochc7cc0282012-03-05 14:35:55 +00003307 // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
Ben Murdochbb769b22010-08-11 14:56:33 +01003308 // so it must be retained across the C-call.
3309 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
Steve Block44f0eee2011-05-26 01:26:41 +01003310 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
Ben Murdochbb769b22010-08-11 14:56:33 +01003311
Steve Block1e0659c2011-05-24 12:43:12 +01003312 EnterExitFrameEpilogue(arg_stack_space, save_doubles);
Ben Murdochbb769b22010-08-11 14:56:33 +01003313}
3314
3315
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003316void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003317 EnterExitFramePrologue(false);
Steve Block1e0659c2011-05-24 12:43:12 +01003318 EnterExitFrameEpilogue(arg_stack_space, false);
Ben Murdochbb769b22010-08-11 14:56:33 +01003319}
3320
3321
Steve Block1e0659c2011-05-24 12:43:12 +01003322void MacroAssembler::LeaveExitFrame(bool save_doubles) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003323 // Registers:
Steve Block44f0eee2011-05-26 01:26:41 +01003324 // r15 : argv
Steve Block1e0659c2011-05-24 12:43:12 +01003325 if (save_doubles) {
3326 int offset = -2 * kPointerSize;
3327 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
3328 XMMRegister reg = XMMRegister::FromAllocationIndex(i);
3329 movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
3330 }
3331 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003332 // Get the return address from the stack and restore the frame pointer.
3333 movq(rcx, Operand(rbp, 1 * kPointerSize));
3334 movq(rbp, Operand(rbp, 0 * kPointerSize));
3335
Steve Block1e0659c2011-05-24 12:43:12 +01003336 // Drop everything up to and including the arguments and the receiver
Steve Blocka7e24c12009-10-30 11:49:00 +00003337 // from the caller stack.
Steve Block44f0eee2011-05-26 01:26:41 +01003338 lea(rsp, Operand(r15, 1 * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00003339
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003340 // Push the return address to get ready to return.
3341 push(rcx);
3342
3343 LeaveExitFrameEpilogue();
3344}
3345
3346
3347void MacroAssembler::LeaveApiExitFrame() {
3348 movq(rsp, rbp);
3349 pop(rbp);
3350
3351 LeaveExitFrameEpilogue();
3352}
3353
3354
3355void MacroAssembler::LeaveExitFrameEpilogue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00003356 // Restore current context from top and clear it in debug mode.
Ben Murdoch589d6972011-11-30 16:04:58 +00003357 ExternalReference context_address(Isolate::kContextAddress, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01003358 Operand context_operand = ExternalOperand(context_address);
3359 movq(rsi, context_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003360#ifdef DEBUG
Steve Block44f0eee2011-05-26 01:26:41 +01003361 movq(context_operand, Immediate(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00003362#endif
3363
Steve Blocka7e24c12009-10-30 11:49:00 +00003364 // Clear the top frame.
Ben Murdoch589d6972011-11-30 16:04:58 +00003365 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
Steve Block44f0eee2011-05-26 01:26:41 +01003366 isolate());
3367 Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
3368 movq(c_entry_fp_operand, Immediate(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00003369}
3370
3371
Steve Blocka7e24c12009-10-30 11:49:00 +00003372void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
3373 Register scratch,
3374 Label* miss) {
3375 Label same_contexts;
3376
3377 ASSERT(!holder_reg.is(scratch));
3378 ASSERT(!scratch.is(kScratchRegister));
3379 // Load current lexical context from the stack frame.
3380 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
3381
3382 // When generating debug code, make sure the lexical context is set.
Steve Block44f0eee2011-05-26 01:26:41 +01003383 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003384 cmpq(scratch, Immediate(0));
3385 Check(not_equal, "we should not have an empty lexical context");
3386 }
3387 // Load the global context of the current context.
3388 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
3389 movq(scratch, FieldOperand(scratch, offset));
3390 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
3391
3392 // Check the context is a global context.
Steve Block44f0eee2011-05-26 01:26:41 +01003393 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003394 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
Ben Murdoch257744e2011-11-30 15:57:28 +00003395 isolate()->factory()->global_context_map());
Steve Blocka7e24c12009-10-30 11:49:00 +00003396 Check(equal, "JSGlobalObject::global_context should be a global context.");
3397 }
3398
3399 // Check if both contexts are the same.
3400 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
3401 j(equal, &same_contexts);
3402
3403 // Compare security tokens.
3404 // Check that the security token in the calling global object is
3405 // compatible with the security token in the receiving global
3406 // object.
3407
3408 // Check the context is a global context.
Steve Block44f0eee2011-05-26 01:26:41 +01003409 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003410 // Preserve original value of holder_reg.
3411 push(holder_reg);
3412 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
3413 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
3414 Check(not_equal, "JSGlobalProxy::context() should not be null.");
3415
3416 // Read the first word and compare to global_context_map().
3417 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
3418 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
3419 Check(equal, "JSGlobalObject::global_context should be a global context.");
3420 pop(holder_reg);
3421 }
3422
3423 movq(kScratchRegister,
3424 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00003425 int token_offset =
3426 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00003427 movq(scratch, FieldOperand(scratch, token_offset));
3428 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
3429 j(not_equal, miss);
3430
3431 bind(&same_contexts);
3432}
3433
3434
Ben Murdochc7cc0282012-03-05 14:35:55 +00003435void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
3436 // First of all we assign the hash seed to scratch.
3437 LoadRoot(scratch, Heap::kHashSeedRootIndex);
3438 SmiToInteger32(scratch, scratch);
3439
3440 // Xor original key with a seed.
3441 xorl(r0, scratch);
3442
3443 // Compute the hash code from the untagged key. This must be kept in sync
3444 // with ComputeIntegerHash in utils.h.
3445 //
3446 // hash = ~hash + (hash << 15);
3447 movl(scratch, r0);
3448 notl(r0);
3449 shll(scratch, Immediate(15));
3450 addl(r0, scratch);
3451 // hash = hash ^ (hash >> 12);
3452 movl(scratch, r0);
3453 shrl(scratch, Immediate(12));
3454 xorl(r0, scratch);
3455 // hash = hash + (hash << 2);
3456 leal(r0, Operand(r0, r0, times_4, 0));
3457 // hash = hash ^ (hash >> 4);
3458 movl(scratch, r0);
3459 shrl(scratch, Immediate(4));
3460 xorl(r0, scratch);
3461 // hash = hash * 2057;
3462 imull(r0, r0, Immediate(2057));
3463 // hash = hash ^ (hash >> 16);
3464 movl(scratch, r0);
3465 shrl(scratch, Immediate(16));
3466 xorl(r0, scratch);
3467}
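// C equivalent of the seeded hash computed above, step for step. A minimal
// sketch assuming 32-bit unsigned arithmetic; the real authority is
// ComputeIntegerHash in utils.h, which this sequence mirrors.
#include <stdint.h>

uint32_t SeededIntegerHashRef(uint32_t key, uint32_t seed) {
  uint32_t hash = key ^ seed;   // Xor original key with the seed.
  hash = ~hash + (hash << 15);  // hash = ~hash + (hash << 15);
  hash = hash ^ (hash >> 12);
  hash = hash + (hash << 2);    // the leal(r0, Operand(r0, r0, times_4)) step
  hash = hash ^ (hash >> 4);
  hash = hash * 2057;
  hash = hash ^ (hash >> 16);
  return hash;
}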
3468
3469
3470
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003471void MacroAssembler::LoadFromNumberDictionary(Label* miss,
3472 Register elements,
3473 Register key,
3474 Register r0,
3475 Register r1,
3476 Register r2,
3477 Register result) {
3478 // Register use:
3479 //
3480 // elements - holds the slow-case elements of the receiver on entry.
3481 // Unchanged unless 'result' is the same register.
3482 //
3483 // key - holds the smi key on entry.
3484 // Unchanged unless 'result' is the same register.
3485 //
3486 // Scratch registers:
3487 //
3488 // r0 - holds the untagged key on entry and holds the hash once computed.
3489 //
3490 // r1 - used to hold the capacity mask of the dictionary
3491 //
3492 // r2 - used for the index into the dictionary.
3493 //
3494 // result - holds the result on exit if the load succeeded.
3495 // Allowed to be the same as 'key' or 'result'.
3496 // Unchanged on bailout so 'key' or 'result' can be used
3497 // in further computation.
3498
3499 Label done;
3500
Ben Murdochc7cc0282012-03-05 14:35:55 +00003501 GetNumberHash(r0, r1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003502
3503 // Compute capacity mask.
Ben Murdochc7cc0282012-03-05 14:35:55 +00003504 SmiToInteger32(r1, FieldOperand(elements,
3505 SeededNumberDictionary::kCapacityOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003506 decl(r1);
3507
3508 // Generate an unrolled loop that performs a few probes before giving up.
3509 const int kProbes = 4;
3510 for (int i = 0; i < kProbes; i++) {
3511 // Use r2 for index calculations and keep the hash intact in r0.
3512 movq(r2, r0);
3513 // Compute the masked index: (hash + i + i * i) & mask.
3514 if (i > 0) {
Ben Murdochc7cc0282012-03-05 14:35:55 +00003515 addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003516 }
3517 and_(r2, r1);
3518
3519 // Scale the index by multiplying by the entry size.
Ben Murdochc7cc0282012-03-05 14:35:55 +00003520 ASSERT(SeededNumberDictionary::kEntrySize == 3);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003521 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
3522
3523 // Check if the key matches.
3524 cmpq(key, FieldOperand(elements,
3525 r2,
3526 times_pointer_size,
Ben Murdochc7cc0282012-03-05 14:35:55 +00003527 SeededNumberDictionary::kElementsStartOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003528 if (i != (kProbes - 1)) {
3529 j(equal, &done);
3530 } else {
3531 j(not_equal, miss);
3532 }
3533 }
3534
3535 bind(&done);
3536 // Check that the value is a normal property.
3537 const int kDetailsOffset =
Ben Murdochc7cc0282012-03-05 14:35:55 +00003538 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003539 ASSERT_EQ(NORMAL, 0);
3540 Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
Ben Murdoch589d6972011-11-30 16:04:58 +00003541 Smi::FromInt(PropertyDetails::TypeField::kMask));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003542 j(not_zero, miss);
3543
3544 // Get the value at the masked, scaled index.
3545 const int kValueOffset =
Ben Murdochc7cc0282012-03-05 14:35:55 +00003546 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003547 movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
3548}
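// Shape of the unrolled probe loop above, in scalar form. A hedged sketch,
// not V8 code: GetProbeOffsetRef is assumed to mirror the hash-table probe
// offset ((i + i*i) / 2) behind SeededNumberDictionary::GetProbeOffset, and
// entries are (key, value, details) triples, hence the times-3 index scaling
// in the assembly above.
#include <stdint.h>

static inline uint32_t GetProbeOffsetRef(uint32_t i) {
  return (i + i * i) >> 1;  // Assumed probe offset; 0 for the first probe.
}

// Returns the entry index on a hit, -1 on a miss after kProbes attempts.
int FindNumberDictionaryEntryRef(uint32_t hash, uint32_t capacity,
                                 const uint32_t* keys, uint32_t key) {
  const int kProbes = 4;
  uint32_t mask = capacity - 1;  // decl(r1) after loading the capacity
  for (int i = 0; i < kProbes; i++) {
    uint32_t index = (hash + GetProbeOffsetRef(i)) & mask;
    if (keys[index] == key) return static_cast<int>(index);
  }
  return -1;  // j(not_equal, miss) on the last probe
}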
3549
3550
Steve Blocka7e24c12009-10-30 11:49:00 +00003551void MacroAssembler::LoadAllocationTopHelper(Register result,
Steve Blocka7e24c12009-10-30 11:49:00 +00003552 Register scratch,
3553 AllocationFlags flags) {
3554 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01003555 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00003556
3557 // Just return if allocation top is already known.
3558 if ((flags & RESULT_CONTAINS_TOP) != 0) {
3559 // No use of scratch if allocation top is provided.
Steve Block6ded16b2010-05-10 14:33:55 +01003560 ASSERT(!scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00003561#ifdef DEBUG
3562 // Assert that result actually contains top on entry.
Steve Block44f0eee2011-05-26 01:26:41 +01003563 Operand top_operand = ExternalOperand(new_space_allocation_top);
3564 cmpq(result, top_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003565 Check(equal, "Unexpected allocation top");
3566#endif
3567 return;
3568 }
3569
Steve Block6ded16b2010-05-10 14:33:55 +01003570 // Move address of new object to result. Use scratch register if available,
3571 // and keep address in scratch until call to UpdateAllocationTopHelper.
3572 if (scratch.is_valid()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003573 LoadAddress(scratch, new_space_allocation_top);
Steve Blocka7e24c12009-10-30 11:49:00 +00003574 movq(result, Operand(scratch, 0));
Steve Block6ded16b2010-05-10 14:33:55 +01003575 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003576 Load(result, new_space_allocation_top);
Steve Blocka7e24c12009-10-30 11:49:00 +00003577 }
3578}
3579
3580
3581void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
3582 Register scratch) {
Steve Block44f0eee2011-05-26 01:26:41 +01003583 if (emit_debug_code()) {
Steve Blockd0582a62009-12-15 09:54:21 +00003584 testq(result_end, Immediate(kObjectAlignmentMask));
3585 Check(zero, "Unaligned allocation in new space");
3586 }
3587
Steve Blocka7e24c12009-10-30 11:49:00 +00003588 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01003589 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00003590
3591 // Update new top.
Steve Block44f0eee2011-05-26 01:26:41 +01003592 if (scratch.is_valid()) {
3593 // Scratch already contains address of allocation top.
3594 movq(Operand(scratch, 0), result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00003595 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003596 Store(new_space_allocation_top, result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00003597 }
3598}
3599
3600
3601void MacroAssembler::AllocateInNewSpace(int object_size,
3602 Register result,
3603 Register result_end,
3604 Register scratch,
3605 Label* gc_required,
3606 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07003607 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01003608 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07003609 // Trash the registers to simulate an allocation failure.
3610 movl(result, Immediate(0x7091));
3611 if (result_end.is_valid()) {
3612 movl(result_end, Immediate(0x7191));
3613 }
3614 if (scratch.is_valid()) {
3615 movl(scratch, Immediate(0x7291));
3616 }
3617 }
3618 jmp(gc_required);
3619 return;
3620 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003621 ASSERT(!result.is(result_end));
3622
3623 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003624 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00003625
3626 // Calculate new top and bail out if new space is exhausted.
3627 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01003628 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Block6ded16b2010-05-10 14:33:55 +01003629
3630 Register top_reg = result_end.is_valid() ? result_end : result;
3631
Steve Block1e0659c2011-05-24 12:43:12 +01003632 if (!top_reg.is(result)) {
3633 movq(top_reg, result);
Steve Block6ded16b2010-05-10 14:33:55 +01003634 }
Steve Block1e0659c2011-05-24 12:43:12 +01003635 addq(top_reg, Immediate(object_size));
3636 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01003637 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
3638 cmpq(top_reg, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003639 j(above, gc_required);
3640
3641 // Update allocation top.
Steve Block6ded16b2010-05-10 14:33:55 +01003642 UpdateAllocationTopHelper(top_reg, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00003643
Steve Block6ded16b2010-05-10 14:33:55 +01003644 if (top_reg.is(result)) {
3645 if ((flags & TAG_OBJECT) != 0) {
3646 subq(result, Immediate(object_size - kHeapObjectTag));
3647 } else {
3648 subq(result, Immediate(object_size));
3649 }
3650 } else if ((flags & TAG_OBJECT) != 0) {
3651 // Tag the result if requested.
Steve Blocka7e24c12009-10-30 11:49:00 +00003652 addq(result, Immediate(kHeapObjectTag));
3653 }
3654}
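// The fast path above is a bump-pointer allocation. A minimal sketch, not
// V8's API: 'top' and 'limit' stand for the new-space allocation top and
// limit cells accessed through ExternalOperand, and kHeapObjectTag is
// assumed to be 1 as on this port.
#include <stddef.h>
#include <stdint.h>

void* TryBumpAllocate(uintptr_t* top, uintptr_t limit, size_t object_size) {
  uintptr_t result = *top;
  uintptr_t new_top = result + object_size;
  if (new_top < result) return NULL;  // j(carry, gc_required)
  if (new_top > limit) return NULL;   // cmpq against the limit cell
  *top = new_top;                     // UpdateAllocationTopHelper
  return reinterpret_cast<void*>(result + 1);  // TAG_OBJECT: add the tag bit.
}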
3655
3656
3657void MacroAssembler::AllocateInNewSpace(int header_size,
3658 ScaleFactor element_size,
3659 Register element_count,
3660 Register result,
3661 Register result_end,
3662 Register scratch,
3663 Label* gc_required,
3664 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07003665 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01003666 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07003667 // Trash the registers to simulate an allocation failure.
3668 movl(result, Immediate(0x7091));
3669 movl(result_end, Immediate(0x7191));
3670 if (scratch.is_valid()) {
3671 movl(scratch, Immediate(0x7291));
3672 }
3673 // Register element_count is not modified by the function.
3674 }
3675 jmp(gc_required);
3676 return;
3677 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003678 ASSERT(!result.is(result_end));
3679
3680 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003681 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00003682
3683 // Calculate new top and bail out if new space is exhausted.
3684 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01003685 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Block1e0659c2011-05-24 12:43:12 +01003686
3687 // We assume that element_count*element_size + header_size does not
3688 // overflow.
3689 lea(result_end, Operand(element_count, element_size, header_size));
3690 addq(result_end, result);
3691 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01003692 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
3693 cmpq(result_end, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003694 j(above, gc_required);
3695
3696 // Update allocation top.
3697 UpdateAllocationTopHelper(result_end, scratch);
3698
3699 // Tag the result if requested.
3700 if ((flags & TAG_OBJECT) != 0) {
3701 addq(result, Immediate(kHeapObjectTag));
3702 }
3703}
3704
3705
3706void MacroAssembler::AllocateInNewSpace(Register object_size,
3707 Register result,
3708 Register result_end,
3709 Register scratch,
3710 Label* gc_required,
3711 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07003712 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01003713 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07003714 // Trash the registers to simulate an allocation failure.
3715 movl(result, Immediate(0x7091));
3716 movl(result_end, Immediate(0x7191));
3717 if (scratch.is_valid()) {
3718 movl(scratch, Immediate(0x7291));
3719 }
3720 // object_size is left unchanged by this function.
3721 }
3722 jmp(gc_required);
3723 return;
3724 }
3725 ASSERT(!result.is(result_end));
3726
Steve Blocka7e24c12009-10-30 11:49:00 +00003727 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003728 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00003729
3730 // Calculate new top and bail out if new space is exhausted.
3731 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01003732 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00003733 if (!object_size.is(result_end)) {
3734 movq(result_end, object_size);
3735 }
3736 addq(result_end, result);
Steve Block1e0659c2011-05-24 12:43:12 +01003737 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01003738 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
3739 cmpq(result_end, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003740 j(above, gc_required);
3741
3742 // Update allocation top.
3743 UpdateAllocationTopHelper(result_end, scratch);
3744
3745 // Tag the result if requested.
3746 if ((flags & TAG_OBJECT) != 0) {
3747 addq(result, Immediate(kHeapObjectTag));
3748 }
3749}
3750
3751
3752void MacroAssembler::UndoAllocationInNewSpace(Register object) {
3753 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01003754 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00003755
3756 // Make sure the object has no tag before resetting top.
3757 and_(object, Immediate(~kHeapObjectTagMask));
Steve Block44f0eee2011-05-26 01:26:41 +01003758 Operand top_operand = ExternalOperand(new_space_allocation_top);
Steve Blocka7e24c12009-10-30 11:49:00 +00003759#ifdef DEBUG
Steve Block44f0eee2011-05-26 01:26:41 +01003760 cmpq(object, top_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003761 Check(below, "Undo allocation of non allocated memory");
3762#endif
Steve Block44f0eee2011-05-26 01:26:41 +01003763 movq(top_operand, object);
Steve Blocka7e24c12009-10-30 11:49:00 +00003764}
3765
3766
Steve Block3ce2e202009-11-05 08:53:23 +00003767void MacroAssembler::AllocateHeapNumber(Register result,
3768 Register scratch,
3769 Label* gc_required) {
3770 // Allocate heap number in new space.
3771 AllocateInNewSpace(HeapNumber::kSize,
3772 result,
3773 scratch,
3774 no_reg,
3775 gc_required,
3776 TAG_OBJECT);
3777
3778 // Set the map.
3779 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
3780 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3781}
3782
3783
Leon Clarkee46be812010-01-19 14:06:41 +00003784void MacroAssembler::AllocateTwoByteString(Register result,
3785 Register length,
3786 Register scratch1,
3787 Register scratch2,
3788 Register scratch3,
3789 Label* gc_required) {
3790 // Calculate the number of bytes needed for the characters in the string while
3791 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01003792 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
3793 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00003794 ASSERT(kShortSize == 2);
3795 // scratch1 = length * 2 + kObjectAlignmentMask.
Steve Block6ded16b2010-05-10 14:33:55 +01003796 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
3797 kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00003798 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01003799 if (kHeaderAlignment > 0) {
3800 subq(scratch1, Immediate(kHeaderAlignment));
3801 }
Leon Clarkee46be812010-01-19 14:06:41 +00003802
3803 // Allocate two byte string in new space.
3804 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
3805 times_1,
3806 scratch1,
3807 result,
3808 scratch2,
3809 scratch3,
3810 gc_required,
3811 TAG_OBJECT);
3812
3813 // Set the map, length and hash field.
3814 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
3815 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01003816 Integer32ToSmi(scratch1, length);
3817 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003818 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00003819 Immediate(String::kEmptyHashField));
3820}
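// The size computation above, in scalar form: round the two-byte payload up
// to object alignment, compensating for the header's own misalignment so
// header plus payload lands on an aligned total. A sketch with assumed
// values (on x64, kObjectAlignmentMask is kPointerSize - 1, i.e. 7).
int SeqTwoByteStringSizeRef(int length, int header_size, int alignment_mask) {
  int header_alignment = header_size & alignment_mask;
  // lea + and_ + subq: round (2 * length + header slack) up to alignment,
  // then remove the slack again.
  int payload =
      ((length * 2 + header_alignment + alignment_mask) & ~alignment_mask)
      - header_alignment;
  return header_size + payload;
}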
3821
3822
3823void MacroAssembler::AllocateAsciiString(Register result,
3824 Register length,
3825 Register scratch1,
3826 Register scratch2,
3827 Register scratch3,
3828 Label* gc_required) {
3829 // Calculate the number of bytes needed for the characters in the string while
3830 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01003831 const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
3832 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00003833 movl(scratch1, length);
3834 ASSERT(kCharSize == 1);
Steve Block6ded16b2010-05-10 14:33:55 +01003835 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00003836 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01003837 if (kHeaderAlignment > 0) {
3838 subq(scratch1, Immediate(kHeaderAlignment));
3839 }
Leon Clarkee46be812010-01-19 14:06:41 +00003840
Ben Murdochc7cc0282012-03-05 14:35:55 +00003841 // Allocate ASCII string in new space.
Leon Clarkee46be812010-01-19 14:06:41 +00003842 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
3843 times_1,
3844 scratch1,
3845 result,
3846 scratch2,
3847 scratch3,
3848 gc_required,
3849 TAG_OBJECT);
3850
3851 // Set the map, length and hash field.
3852 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
3853 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01003854 Integer32ToSmi(scratch1, length);
3855 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003856 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00003857 Immediate(String::kEmptyHashField));
3858}
3859
3860
Ben Murdoch589d6972011-11-30 16:04:58 +00003861void MacroAssembler::AllocateTwoByteConsString(Register result,
Leon Clarkee46be812010-01-19 14:06:41 +00003862 Register scratch1,
3863 Register scratch2,
3864 Label* gc_required) {
3865 // Allocate cons string in new space.
3866 AllocateInNewSpace(ConsString::kSize,
3867 result,
3868 scratch1,
3869 scratch2,
3870 gc_required,
3871 TAG_OBJECT);
3872
3873 // Set the map. The other fields are left uninitialized.
3874 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
3875 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3876}
3877
3878
3879void MacroAssembler::AllocateAsciiConsString(Register result,
3880 Register scratch1,
3881 Register scratch2,
3882 Label* gc_required) {
3883 // Allocate ASCII cons string in new space.
3884 AllocateInNewSpace(ConsString::kSize,
3885 result,
3886 scratch1,
3887 scratch2,
3888 gc_required,
3889 TAG_OBJECT);
3890
3891 // Set the map. The other fields are left uninitialized.
3892 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
3893 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3894}
3895
3896
Ben Murdoch589d6972011-11-30 16:04:58 +00003897void MacroAssembler::AllocateTwoByteSlicedString(Register result,
3898 Register scratch1,
3899 Register scratch2,
3900 Label* gc_required) {
3901 // Allocate sliced string in new space.
3902 AllocateInNewSpace(SlicedString::kSize,
3903 result,
3904 scratch1,
3905 scratch2,
3906 gc_required,
3907 TAG_OBJECT);
3908
3909 // Set the map. The other fields are left uninitialized.
3910 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
3911 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3912}
3913
3914
3915void MacroAssembler::AllocateAsciiSlicedString(Register result,
3916 Register scratch1,
3917 Register scratch2,
3918 Label* gc_required) {
3919 // Allocate ASCII sliced string in new space.
3920 AllocateInNewSpace(SlicedString::kSize,
3921 result,
3922 scratch1,
3923 scratch2,
3924 gc_required,
3925 TAG_OBJECT);
3926
3927 // Set the map. The other fields are left uninitialized.
3928 LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex);
3929 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3930}
3931
3932
Steve Block44f0eee2011-05-26 01:26:41 +01003933// Copy memory, byte-by-byte, from source to destination. Not optimized for
3934// long or aligned copies.
3935// Destination is incremented by length; source, length and scratch are
3936// clobbered.
3937// A simpler loop is faster on small copies, but slower on large ones.
3938// The cld() instruction must have been emitted, to set the direction flag,
3939// before calling this function.
3940void MacroAssembler::CopyBytes(Register destination,
3941 Register source,
3942 Register length,
3943 int min_length,
3944 Register scratch) {
3945 ASSERT(min_length >= 0);
3946 if (FLAG_debug_code) {
3947 cmpl(length, Immediate(min_length));
3948 Assert(greater_equal, "Invalid min_length");
3949 }
3950 Label loop, done, short_string, short_loop;
3951
3952 const int kLongStringLimit = 20;
3953 if (min_length <= kLongStringLimit) {
3954 cmpl(length, Immediate(kLongStringLimit));
3955 j(less_equal, &short_string);
3956 }
3957
3958 ASSERT(source.is(rsi));
3959 ASSERT(destination.is(rdi));
3960 ASSERT(length.is(rcx));
3961
3962 // Because source is 8-byte aligned in our uses of this function,
3963 // we keep source aligned for the rep movs operation by copying the odd bytes
3964 // at the end of the ranges.
3965 movq(scratch, length);
3966 shrl(length, Immediate(3));
3967 repmovsq();
3968 // Move remaining bytes of length.
3969 andl(scratch, Immediate(0x7));
3970 movq(length, Operand(source, scratch, times_1, -8));
3971 movq(Operand(destination, scratch, times_1, -8), length);
3972 addq(destination, scratch);
3973
3974 if (min_length <= kLongStringLimit) {
3975 jmp(&done);
3976
3977 bind(&short_string);
3978 if (min_length == 0) {
3979 testl(length, length);
3980 j(zero, &done);
3981 }
3982 lea(scratch, Operand(destination, length, times_1, 0));
3983
3984 bind(&short_loop);
3985 movb(length, Operand(source, 0));
3986 movb(Operand(destination, 0), length);
3987 incq(source);
3988 incq(destination);
3989 cmpq(destination, scratch);
3990 j(not_equal, &short_loop);
3991
3992 bind(&done);
3993 }
3994}
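// The long-copy strategy above, expressed in C: move length/8 quadwords with
// the equivalent of rep movsq, then re-copy the final eight bytes of the
// range, which sweeps up the 0-7 leftover bytes. A hedged sketch assuming
// length >= 8 and non-overlapping buffers, as the long path above does.
#include <stdint.h>
#include <string.h>

void CopyBytesRef(uint8_t* destination, const uint8_t* source, size_t length) {
  size_t quads = length >> 3;              // shrl(length, Immediate(3))
  memcpy(destination, source, quads * 8);  // repmovsq()
  // The tail copy may overlap bytes already moved; that is harmless and
  // keeps the main copy aligned, as the comment above explains.
  memcpy(destination + length - 8, source + length - 8, 8);
}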
3995
3996
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003997void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
3998 Register end_offset,
3999 Register filler) {
4000 Label loop, entry;
4001 jmp(&entry);
4002 bind(&loop);
4003 movq(Operand(start_offset, 0), filler);
4004 addq(start_offset, Immediate(kPointerSize));
4005 bind(&entry);
4006 cmpq(start_offset, end_offset);
4007 j(less, &loop);
4008}
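// The loop above in C: store 'filler' into every pointer-sized slot of
// [start_offset, end_offset). A trivial sketch, not V8 code.
#include <stdint.h>

void InitializeFieldsWithFillerRef(intptr_t* start, intptr_t* end,
                                   intptr_t filler) {
  for (intptr_t* p = start; p < end; p++) {
    *p = filler;  // movq(Operand(start_offset, 0), filler) + addq
  }
}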
4009
4010
Steve Blockd0582a62009-12-15 09:54:21 +00004011void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
4012 if (context_chain_length > 0) {
4013 // Move up the chain of contexts to the context containing the slot.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004014 movq(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Blockd0582a62009-12-15 09:54:21 +00004015 for (int i = 1; i < context_chain_length; i++) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004016 movq(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Blockd0582a62009-12-15 09:54:21 +00004017 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004018 } else {
4019 // Slot is in the current function context. Move it into the
4020 // destination register in case we store into it (the write barrier
4021 // cannot be allowed to destroy the context in rsi).
4022 movq(dst, rsi);
4023 }
4024
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004025 // We should not have found a with context by walking the context
4026 // chain (i.e., the static scope chain and runtime context chain do
4027 // not agree). A variable occurring in such a scope should have
4028 // slot type LOOKUP and not CONTEXT.
Steve Block44f0eee2011-05-26 01:26:41 +01004029 if (emit_debug_code()) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004030 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
4031 Heap::kWithContextMapRootIndex);
4032 Check(not_equal, "Variable resolved to with context.");
Steve Blockd0582a62009-12-15 09:54:21 +00004033 }
4034}
4035
Steve Block44f0eee2011-05-26 01:26:41 +01004036#ifdef _WIN64
4037static const int kRegisterPassedArguments = 4;
4038#else
4039static const int kRegisterPassedArguments = 6;
4040#endif
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004041
Ben Murdochb0fe1622011-05-05 13:52:32 +01004042void MacroAssembler::LoadGlobalFunction(int index, Register function) {
4043 // Load the global or builtins object from the current context.
4044 movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
4045 // Load the global context from the global or builtins object.
4046 movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
4047 // Load the function from the global context.
4048 movq(function, Operand(function, Context::SlotOffset(index)));
4049}
4050
4051
4052void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
4053 Register map) {
4054 // Load the initial map. The global functions all have initial maps.
4055 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01004056 if (emit_debug_code()) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01004057 Label ok, fail;
Ben Murdoch257744e2011-11-30 15:57:28 +00004058 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004059 jmp(&ok);
4060 bind(&fail);
4061 Abort("Global functions must have initial map");
4062 bind(&ok);
4063 }
4064}
4065
4066
Leon Clarke4515c472010-02-03 11:58:03 +00004067int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004068 // On Windows 64 stack slots are reserved by the caller for all arguments
4069 // including the ones passed in registers, and space is always allocated for
4070 // the four register arguments even if the function takes fewer than four
4071 // arguments.
4072 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
4073 // and the caller does not reserve stack slots for them.
Leon Clarke4515c472010-02-03 11:58:03 +00004074 ASSERT(num_arguments >= 0);
4075#ifdef _WIN64
Steve Block44f0eee2011-05-26 01:26:41 +01004076 const int kMinimumStackSlots = kRegisterPassedArguments;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004077 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
4078 return num_arguments;
Leon Clarke4515c472010-02-03 11:58:03 +00004079#else
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004080 if (num_arguments < kRegisterPassedArguments) return 0;
4081 return num_arguments - kRegisterPassedArguments;
Leon Clarke4515c472010-02-03 11:58:03 +00004082#endif
Leon Clarke4515c472010-02-03 11:58:03 +00004083}
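// The slot computation above with worked values. A sketch, not V8 code.
int ArgumentStackSlotsRef(int num_arguments, bool win64) {
  const int kRegisterArgs = win64 ? 4 : 6;
  if (win64) {
    // Shadow space: at least four slots, plus one per additional argument.
    return num_arguments < kRegisterArgs ? kRegisterArgs : num_arguments;
  }
  // AMD64 ABI: only arguments beyond the sixth spill to the stack.
  return num_arguments < kRegisterArgs ? 0 : num_arguments - kRegisterArgs;
}
// E.g. ArgumentStackSlotsRef(3, true) == 4 (shadow space only), while
// ArgumentStackSlotsRef(8, false) == 2 (arguments 7 and 8 on the stack).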
4084
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004085
Leon Clarke4515c472010-02-03 11:58:03 +00004086void MacroAssembler::PrepareCallCFunction(int num_arguments) {
4087 int frame_alignment = OS::ActivationFrameAlignment();
4088 ASSERT(frame_alignment != 0);
4089 ASSERT(num_arguments >= 0);
Steve Block44f0eee2011-05-26 01:26:41 +01004090
Leon Clarke4515c472010-02-03 11:58:03 +00004091 // Make stack end at alignment and allocate space for arguments and old rsp.
4092 movq(kScratchRegister, rsp);
4093 ASSERT(IsPowerOf2(frame_alignment));
4094 int argument_slots_on_stack =
4095 ArgumentStackSlotsForCFunctionCall(num_arguments);
4096 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
4097 and_(rsp, Immediate(-frame_alignment));
4098 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
4099}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}
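
// Typical calling sequence (a sketch; the external reference is made up,
// and the argument registers shown are for the AMD64 ABI; Win64 would use
// rcx and rdx instead):
//   PrepareCallCFunction(2);
//   movq(rdi, first_arg);
//   movq(rsi, second_arg);
//   CallCFunction(ExternalReference::hypothetical_entry(isolate()), 2);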


bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}
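
// AreAliased tests all six pairs; e.g. AreAliased(rax, rbx, rax, rcx)
// returns true because the first and third registers coincide.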


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
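
// Example use of CodePatcher (illustrative): plant an int3 at 'address';
// the destructor then flushes the instruction cache for the patched range:
//   CodePatcher patcher(address, 1);
//   patcher.masm()->int3();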


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    movq(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
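
// Illustrative use of CheckPageFlag (assuming the incremental-marking flag
// name from MemoryChunk): jump when the object's page records incoming
// pointers:
//   CheckPageFlag(object, scratch,
//                 MemoryChunk::kPointersToHereAreInterestingMask,
//                 not_zero, &interesting);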


void MacroAssembler::JumpIfBlack(Register object,
                                 Register bitmap_scratch,
                                 Register mask_scratch,
                                 Label* on_black,
                                 Label::Distance on_black_distance) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(object, bitmap_scratch, mask_scratch);

  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  // The mask_scratch register contains a 1 at the position of the first bit
  // and a 0 at all other positions, including the position of the second bit.
  movq(rcx, mask_scratch);
  // Make rcx into a mask that covers both marking bits using the operation
  // rcx = mask | (mask << 1).
  lea(rcx, Operand(mask_scratch, mask_scratch, times_2, 0));
  // Note that we are using a 4-byte aligned 8-byte load.
  and_(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  cmpq(mask_scratch, rcx);
  j(equal, on_black, on_black_distance);
}
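
// Worked example for JumpIfBlack: if mask_scratch is ...0100, the lea
// leaves ...1100 in rcx; after anding with the bitmap word the comparison
// succeeds exactly when the mark-bit pair reads "10" (first bit set,
// second bit clear), which is the black pattern.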


// Detect some, but not all, common pointer-free objects. This is used by the
// incremental write barrier which doesn't care about oddballs (they are always
// marked black immediately so this code is not hit).
void MacroAssembler::JumpIfDataObject(
    Register value,
    Register scratch,
    Label* not_data_object,
    Label::Distance not_data_object_distance) {
  Label is_data_object;
  movq(scratch, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
  j(equal, &is_data_object, Label::kNear);
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  testb(FieldOperand(scratch, Map::kInstanceTypeOffset),
        Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, not_data_object, not_data_object_distance);
  bind(&is_data_object);
}
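
// The single testb above covers both conditions at once: kIsNotStringMask
// (0x80) is set for non-string objects and kIsIndirectStringMask (0x01)
// for cons and sliced strings, so a non-zero result means the value may
// contain GC pointers.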


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
  movq(bitmap_reg, addr_reg);
  // Sign-extended 32-bit immediate.
  and_(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  movq(rcx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shrl(rcx, Immediate(shift));
  and_(rcx,
       Immediate((Page::kPageAlignmentMask >> shift) &
                 ~(Bitmap::kBytesPerCell - 1)));

  addq(bitmap_reg, rcx);
  movq(rcx, addr_reg);
  shrl(rcx, Immediate(kPointerSizeLog2));
  and_(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
  movl(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
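
// On exit from GetMarkBits (a sketch, assuming 32-bit bitmap cells and
// 8-byte pointers, so shift == 5 + 3 - 2 == 6):
//   bitmap_reg = page start + byte offset of the cell with addr's mark bits
//                (callers add MemoryChunk::kHeaderSize to reach the bitmap);
//   mask_reg   = 1 << (index of addr's first mark bit within that cell).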


void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
  j(not_zero, &done, Label::kNear);

  if (FLAG_debug_code) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // Shift left. May overflow, making the check conservative.
    addq(mask_scratch, mask_scratch);
    testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white. We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = rcx;  // Holds map while checking type.
  Register length = rcx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  movq(map, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(not_equal, &not_heap_number, Label::kNear);
  movq(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = rcx;
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  testb(instance_type, Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  testb(instance_type, Immediate(kExternalStringTag));
  j(zero, &not_external, Label::kNear);
  movq(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kAsciiStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  addq(length, Immediate(0x04));
  // Value is now either 4 (if ASCII) or 8 (if UC16), i.e. the char-size
  // shifted by 2.
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, Immediate(2 + kSmiTagSize + kSmiShiftSize));
  addq(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));
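
  // Worked example (illustrative): for a sequential two-byte string of
  // length 3 the multiply gives 8 * smi(3), and the shift reduces it to
  // 2 * 3 = 6 payload bytes; SeqString::kHeaderSize is then added and the
  // sum rounded up to the object alignment.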

  bind(&is_data_object);
  // Value is a data object, and it is white. Mark it black. Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);

  bind(&done);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64