// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate->heap()->roots_address());
  intptr_t delta = other.address() - roots_register_value;
  return delta;
}


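// When the root array register is available and we are not serializing,
// references that lie within a 32-bit displacement of the (biased) roots
// array can be addressed directly off kRootRegister, avoiding a 10-byte
// movq of a 64-bit address. ExternalOperand, Load, Store, LoadAddress and
// LoadAddressSize below all share this fast path.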
63Operand MacroAssembler::ExternalOperand(ExternalReference target,
64 Register scratch) {
65 if (root_array_available_ && !Serializer::enabled()) {
66 intptr_t delta = RootRegisterDelta(target, isolate());
67 if (is_int32(delta)) {
68 Serializer::TooLateToEnableNow();
69 return Operand(kRootRegister, static_cast<int32_t>(delta));
70 }
71 }
72 movq(scratch, target);
73 return Operand(scratch, 0);
74}
75
76
77void MacroAssembler::Load(Register destination, ExternalReference source) {
78 if (root_array_available_ && !Serializer::enabled()) {
79 intptr_t delta = RootRegisterDelta(source, isolate());
80 if (is_int32(delta)) {
81 Serializer::TooLateToEnableNow();
82 movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
83 return;
84 }
85 }
86 // Safe code.
87 if (destination.is(rax)) {
88 load_rax(source);
89 } else {
90 movq(kScratchRegister, source);
91 movq(destination, Operand(kScratchRegister, 0));
92 }
93}
94
95
96void MacroAssembler::Store(ExternalReference destination, Register source) {
97 if (root_array_available_ && !Serializer::enabled()) {
98 intptr_t delta = RootRegisterDelta(destination, isolate());
99 if (is_int32(delta)) {
100 Serializer::TooLateToEnableNow();
101 movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
102 return;
103 }
104 }
105 // Safe code.
106 if (source.is(rax)) {
107 store_rax(destination);
108 } else {
109 movq(kScratchRegister, destination);
110 movq(Operand(kScratchRegister, 0), source);
111 }
112}
113
114
115void MacroAssembler::LoadAddress(Register destination,
116 ExternalReference source) {
117 if (root_array_available_ && !Serializer::enabled()) {
118 intptr_t delta = RootRegisterDelta(source, isolate());
119 if (is_int32(delta)) {
120 Serializer::TooLateToEnableNow();
121 lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
122 return;
123 }
124 }
125 // Safe code.
126 movq(destination, source);
127}
128
129
130int MacroAssembler::LoadAddressSize(ExternalReference source) {
131 if (root_array_available_ && !Serializer::enabled()) {
132 // This calculation depends on the internals of LoadAddress.
133 // It's correctness is ensured by the asserts in the Call
134 // instruction below.
135 intptr_t delta = RootRegisterDelta(source, isolate());
136 if (is_int32(delta)) {
137 Serializer::TooLateToEnableNow();
138 // Operand is lea(scratch, Operand(kRootRegister, delta));
139 // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
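      // One byte each for the REX.W prefix, the 8D opcode and the ModRM
      // byte, plus one or four displacement bytes.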
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movq(destination, src);
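  // (movq reg, imm64 encodes as REX.W prefix + opcode + imm64,
  // i.e. 1 + 1 + 8 bytes.)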
  return 10;
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  ASSERT(root_array_available_);
  movq(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  cmpq(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  ASSERT(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (emit_debug_code()) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space, Label::kNear);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr. See the
  // Page::GetRegionNumberForAddress method for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));
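  // addr now holds the region number within the page; bts below uses it
  // as the bit index into the page's dirty marks.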

  // Set dirty mark for region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance near_jump) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask(isolate()));
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask(isolate()));
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start(isolate()));
    cmpq(scratch, kScratchRegister);
    j(cc, branch, near_jump);
  } else {
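    // New space is a size-aligned region, so masking (object - start) with
    // the new space mask leaves zero exactly for new-space addresses.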
    ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
    j(cc, branch, near_jump);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (emit_debug_code()) {
    Label okay;
    JumpIfNotSmi(object, &okay, Label::kNear);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok, Label::kNear);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
         RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
        RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later. However as the new key is the numeric value of a string key
  // there is no difference in using either key.
  Integer32ToSmi(index, hash);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(rax, function->nargs);
  LoadAddress(rbx, ExternalReference(function, isolate()));
  CEntryStub ces(1);
  ces.SaveDoubles();
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return HEAP->undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  return TryJumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(ExternalReference(fid, isolate()),
                                      num_arguments,
                                      result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#ifdef _WIN64
  // We need to prepare a slot for result handle on stack and put
  // a pointer to it into 1st arg register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}


MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
    ApiFunction* function, int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Factory* factory = isolate()->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
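  // All three HandleScope fields are addressed relative to next_address,
  // so a single base register can reach them.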
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax,
       reinterpret_cast<int64_t>(function->address()),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#ifdef _WIN64
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero. Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
                                           0, 1);
  if (result->IsFailure()) {
    return result;
  }

  bind(&empty_result);
  // It was zero; the result is undefined.
  Move(rax, factory->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
#ifdef _WIN64
  LoadAddress(rcx, ExternalReference::isolate_address());
#else
  LoadAddress(rdi, ExternalReference::isolate_address());
#endif
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);

  return result;
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext, int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  return TryTailCallStub(&ces);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    Set(kScratchRegister, x);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.
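//
// On x64 a smi keeps its 32-bit payload in the upper half of a 64-bit word
// (kSmiShift == 32) with the tag bit clear; e.g. Smi::FromInt(5) is the
// word 0x0000000500000000.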

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      Label ok;
      j(equal, &ok, Label::kNear);
      int3();
      bind(&ok);
    }
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

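  // kSmiConstantRegister holds Smi::FromInt(1), so one lea can scale it to
  // other small smi constants; e.g. "case 9" computes base + index * 8.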
  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}


void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok, Label::kNear);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
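  // Write only the upper half of the field; in a smi slot the lower half,
  // including the tag bit, is already zero.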
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  if (emit_debug_code()) {
    AbortIfNotSmi(smi1);
    AbortIfNotSmi(smi2);
  }
  cmpq(smi1, smi2);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  Cmp(dst, src);
}


void MacroAssembler::Cmp(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  ASSERT(!dst.AddressUsesRegister(smi_reg));
  cmpq(dst, smi_reg);
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
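  // Untagging is a right shift by kSmiShift and scaling by 2^power is a
  // left shift by power, so both fold into a single net shift.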
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label* on_not_smis,
                                 Label::Distance near_jump) {
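  // The bitwise or of two words has a clear tag bit only if both tag bits
  // are clear, so one smi check on src1 | src2 tests both at once.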
  if (dst.is(src1) || dst.is(src2)) {
    ASSERT(!src1.is(kScratchRegister));
    ASSERT(!src2.is(kScratchRegister));
    movq(kScratchRegister, src1);
    or_(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    or_(dst, src2);
    JumpIfNotSmi(dst, on_not_smis, near_jump);
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckSmi(const Operand& src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Test that both bits of the mask 0x8000000000000001 are zero.
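  // Rotating left by one moves the sign bit to bit 0 and the tag bit to
  // bit 1, so a single testb against 3 checks sign and tag together.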
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
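  // If both are smis, the low two bits of each are 00 and so are the sum's;
  // one heap object tag (01) leaves 01 in the sum, two leave 10 (1 + 1),
  // so testing against kHeapObjectTagMask catches every case.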
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}


void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            Label* on_invalid,
                                            Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid,
                                                Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfSmi(Register src,
                               Label* on_smi,
                               Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(smi, on_smi, near_jump);
}


void MacroAssembler::JumpIfNotSmi(Register src,
                                  Label* on_not_smi,
                                  Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi, near_jump);
}


void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, Label* on_not_smi_or_negative,
    Label::Distance near_jump) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Register src2,
                                      Label* on_not_both_smi,
                                      Label::Distance near_jump) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Register src2,
                                                  Label* on_not_both_smi,
                                                  Label::Distance near_jump) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result,
                                       Label::Distance near_jump) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result, near_jump);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result, near_jump);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
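    // Small constants are added via kSmiConstantRegister, which holds
    // Smi::FromInt(1): one addq or lea instead of materializing the smi.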
Steve Block8defd9f2010-07-08 12:39:36 +01001317 switch (constant->value()) {
1318 case 1:
1319 addq(dst, kSmiConstantRegister);
1320 return;
1321 case 2:
1322 lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1323 return;
1324 case 4:
1325 lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1326 return;
1327 case 8:
1328 lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1329 return;
1330 default:
1331 Register constant_reg = GetSmiConstant(constant);
1332 addq(dst, constant_reg);
1333 return;
1334 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001335 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001336 switch (constant->value()) {
1337 case 1:
1338 lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
1339 return;
1340 case 2:
1341 lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1342 return;
1343 case 4:
1344 lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1345 return;
1346 case 8:
1347 lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1348 return;
1349 default:
1350 LoadSmiConstant(dst, constant);
1351 addq(dst, src);
1352 return;
1353 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001354 }
1355}
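
// Editor's sketch of the lea special cases above, assuming
// kSmiConstantRegister permanently holds Smi::FromInt(1) (which the
// `case 1: addq(dst, kSmiConstantRegister)` branch also relies on):
//
//   lea(dst, Operand(src, kSmiConstantRegister, times_4, 0))
//     computes dst = src + 4 * Smi::FromInt(1) = src + Smi::FromInt(4),
//
// so the smi constants 1, 2, 4 and 8 are added without loading a constant.
// The trade-off is that lea does not set flags, which is why this variant
// has no overflow bailout.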


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}
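
// Editor's note on the in-memory variant above: with the payload in the
// high 32 bits, Operand(dst, kSmiShift / kBitsPerByte) addresses the word
// four bytes past the tagged value, i.e. (on little-endian x64) the raw
// int32 payload itself, so a single 32-bit addl adds constant->value()
// in place.  There is deliberately no bailout label; overflow is the
// caller's responsibility here.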


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value give the same result; they
      // differ only in the overflow flag, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test for non-negativity before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result, near_jump);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result, near_jump);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test for non-negativity before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result, near_jump);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value give the same result; they
      // differ only in the overflow flag, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result, near_jump);
    }
  }
}


void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            Label* on_smi_result,
                            Label::Distance near_jump) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result, near_jump);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create
    // a smi.
    j(not_equal, on_smi_result, near_jump);
  }
}
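
// Editor's note: the only 64-bit values with neg(x) == x are 0 and the
// most negative value, which under the high-half encoding are exactly
// Smi::FromInt(0) and Smi::FromInt(Smi::kMinValue) -- the two inputs whose
// negation is not a representable smi.  For example:
//
//   neg(0x0000000500000000) == 0xFFFFFFFB00000000  //  5 -> -5, valid smi
//   neg(0x8000000000000000) == 0x8000000000000000  //  kMinValue, bail out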


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    ASSERT(!src2.AddressUsesRegister(dst));
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
      movq(kScratchRegister, src1);
      addq(kScratchRegister, src2);
      Check(no_overflow, "Smi addition overflow");
    }
    lea(dst, Operand(src1, src2, times_1, 0));
  } else {
    addq(dst, src2);
    Assert(no_overflow, "Smi addition overflow");
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    cmpq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result, near_jump);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure, Label::kNear);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result, Label::kNear);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    // Result was positive zero.
    j(positive, &zero_correct_result, Label::kNear);

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result, Label::kNear);
    // One of src1 and src2 is zero; check whether the other one is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);
  }
}
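
// Editor's note on SmiMul: the product is formed as untagged * tagged
// (SmiToInteger64 shifts one operand's payload down), so
// v1 * (v2 << 32) == (v1 * v2) << 32 is already a correctly tagged smi.
// A zero product is only a valid smi result when it is *positive* zero;
// e.g. -3 * 0 must take the slow path so it can produce the heap number
// -0.0, and the sign of src1 ^ src2 distinguishes the two cases once the
// product is known to be zero.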


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  testq(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with the negative-zero test (negative zero can only
  // happen when dividing zero by a negative number).

  // We overshoot a little and go to the slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div, Label::kNear);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div, Label::kNear);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
  } else {
    j(negative, on_not_smi_result, near_jump);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result, Label::kNear);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result, near_jump);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}
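
// Editor's sketch of the 0x7fffffff filter above: after SmiToInteger32 the
// dividend is a plain int32, and testl(rax, Immediate(0x7fffffff)) sets ZF
// for exactly two inputs -- 0 (all bits clear) and Smi::kMinValue
// (0x80000000, only the sign bit set).  Those are precisely the dividends
// that can go wrong (0 / negative gives -0.0, kMinValue / -1 traps in
// idivl), so every other dividend reaches the fast path after one test.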


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div, Label::kNear);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div, Label::kNear);
  // Retag inputs and go to the slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result, near_jump);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result, Label::kNear);
  testq(src1, src1);
  j(negative, on_not_smi_result, near_jump);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero
  // afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}
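
// Editor's sketch of SmiNot, assuming the high-half encoding: the low 32
// bits of a smi are zero, so adding (or xoring in) 0x00000000FFFFFFFF --
// movl zero-extends the ~0 -- fills exactly the low half with ones without
// touching the payload, and the final not_() then complements the payload
// while clearing the low half back to the zero tag:
//
//   src        == 0x0000000500000000   // Smi::FromInt(5)
//   src + ~0u  == 0x00000005FFFFFFFF
//   not_(...)  == 0xFFFFFFFA00000000   // Smi::FromInt(-6), the smi of ~5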


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    Set(dst, 0);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result, near_jump);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // Shift amount is specified by the lower 5 bits, not six as for the
  // 64-bit shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  // dst and src1 can be the same, because the one case that bails out
  // is a shift by 0, which leaves dst, and therefore src1, unchanged.
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift amount is (rcx & 0x1f) + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result, Label::kNear);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result, near_jump);
    bind(&positive_result);
  } else {
    // src2 was zero and src1 negative.
    j(negative, on_not_smi_result, near_jump);
  }
}
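
// Editor's sketch of the orl(rcx, Immediate(kSmiShift)) trick: a 64-bit
// shift masks its count with 0x3f, so setting bit 5 of the count makes
// shr_cl shift by 32 + (count & 0x1f) -- untag, shift and leave an
// untagged result in one instruction -- and the following shl(kSmiShift)
// retags it.  The testq then catches the one invalid result: a shift by
// zero of a negative input, whose unsigned interpretation (in JS,
// -1 >>> 0 == 4294967295) does not fit in a 32-bit smi payload.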


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift amount is 32 + (original rcx & 0x1f).
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // The operands must not both be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, the tag bit is set in both operands, i.e. neither is a smi.
  j(not_zero, on_not_smis, near_jump);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTagMask, which is either zero
  // or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then the scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
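
// Editor's walk-through of the branch-free select above, writing `mask`
// for kScratchRegister after the subq:
//
//   src1 is a smi:     mask == 0 - 1 == ~0
//     dst = ((src1 ^ src2) & ~0) ^ src1 == src2   // the non-smi
//   src1 is not a smi: mask == 1 - 1 == 0
//     dst = ((src1 ^ src2) &  0) ^ src1 == src1   // the non-smi
//
// This is the classic r = a ^ ((a ^ b) & mask) conditional-select idiom,
// using the smi tag bit of src1 as the condition.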


SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but
  // that will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
  ASSERT_EQ(0, kSmiShift % kBitsPerByte);
  addl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     Label* not_string,
                                     Label::Distance near_jump) {
  Condition is_smi = CheckSmi(object);
  j(is_smi, not_string, near_jump);
  CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
  j(above_equal, not_string, near_jump);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(
    Register first_object,
    Register second_object,
    Register scratch1,
    Register scratch2,
    Label* on_fail,
    Label::Distance near_jump) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
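
// Editor's note on the combined check: after masking, each scratch holds a
// value that fits in three bits (the ASSERT_EQ verifies the mask cannot
// collide with itself shifted by three), so
// lea(scratch1, Operand(scratch1, scratch2, times_8, 0)) packs both masked
// instance types into one word and a single cmpl against
// kFlatAsciiStringTag + (kFlatAsciiStringTag << 3) tests both strings at
// once.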


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure, near_jump);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    Label* on_fail,
    Label::Distance near_jump) {
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this.
  jmp(code_object, rmode);
}


int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
  const int kCallInstructionSize = 3;
  return LoadAddressSize(ext) + kCallInstructionSize;
}


void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(destination, rmode);
#endif
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(pc_offset(), end_position);
#endif
}


void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          unsigned ast_id) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode, ast_id);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Pushad() {
  push(rax);
  push(rcx);
  push(rdx);
  push(rbx);
  // Not pushing rsp or rbp.
  push(rsi);
  push(rdi);
  push(r8);
  push(r9);
  // r10 is kScratchRegister.
  push(r11);
  // r12 is kSmiConstantRegister.
  // r13 is kRootRegister.
  push(r14);
  push(r15);
  STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, -sp_delta));
}


void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, sp_delta));
  pop(r15);
  pop(r14);
  pop(r11);
  pop(r9);
  pop(r8);
  pop(rdi);
  pop(rsi);
  pop(rbx);
  pop(rdx);
  pop(rcx);
  pop(rax);
}


void MacroAssembler::Dropad() {
  addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}


// Order in which general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,
    1,
    2,
    3,
    -1,
    -1,
    4,
    5,
    6,
    7,
    -1,
    8,
    -1,
    -1,
    9,
    10
};
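
// Editor's note on the table above: it maps a register code (the array
// index) to that register's slot in the Pushad frame, with -1 for the
// registers that are not saved (rsp, rbp, r10 == kScratchRegister,
// r12 == kSmiConstantRegister, r13 == kRootRegister).  For example, rbx
// has code 3 and is the fourth register pushed, so its entry is 3; the
// SafepointRegisterSlot helper below turns such an index into an rsp
// displacement.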


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movq(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movq(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  Operand handler_operand =
      ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
  push(handler_operand);
  // Link this handler.
  movq(handler_operand, rsp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  // Unlink this handler.
  Operand handler_operand =
      ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
  pop(handler_operand);
  // Remove the remaining fields.
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::Throw(Register value) {
  // Check that the stack contains next handler, frame pointer, state and
  // return address, in that order.
  STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
                StackHandlerConstants::kStateOffset);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
                StackHandlerConstants::kPCOffset);
  // Keep thrown value in rax.
  if (!value.is(rax)) {
    movq(rax, value);
  }

  ExternalReference handler_address(Isolate::k_handler_address, isolate());
  Operand handler_operand = ExternalOperand(handler_address);
  movq(rsp, handler_operand);
  // Get the next handler in the chain.
  pop(handler_operand);
  pop(rbp);  // Pop frame pointer.
  pop(rdx);  // Remove state.

  // Before returning we restore the context from the frame pointer if it is
  // not NULL.  The frame pointer is NULL in the exception handler of a JS
  // entry frame.
  Set(rsi, 0);  // Tentatively set context pointer to NULL.
  Label skip;
  cmpq(rbp, Immediate(0));
  j(equal, &skip, Label::kNear);
  movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  bind(&skip);
  ret(0);
}
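
// Editor's sketch of the handler record unwound above, given the asserted
// layout (ascending addresses): [next handler][frame pointer][state]
// [return pc].  Pointing rsp at the top handler makes the three pops walk
// exactly those fields, and the final ret(0) consumes the saved pc,
// resuming at the site that pushed the handler.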


void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
                                      Register value) {
  // Keep thrown value in rax.
  if (!value.is(rax)) {
    movq(rax, value);
  }
  // Fetch top stack handler.
  ExternalReference handler_address(Isolate::k_handler_address, isolate());
  Load(rsp, handler_address);

  // Unwind the handlers until the ENTRY handler is found.
  Label loop, done;
  bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kStateOffset;
  cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
  j(equal, &done, Label::kNear);
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kNextOffset;
  movq(rsp, Operand(rsp, kNextOffset));
  jmp(&loop);
  bind(&done);

  // Set the top handler address to the next handler past the current ENTRY
  // handler.
  Operand handler_operand = ExternalOperand(handler_address);
  pop(handler_operand);

  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(
        Isolate::k_external_caught_exception_address, isolate());
    Set(rax, static_cast<int64_t>(false));
    Store(external_caught, rax);

    // Set pending exception and rax to out of memory exception.
    ExternalReference pending_exception(Isolate::k_pending_exception_address,
                                        isolate());
    movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
    Store(pending_exception, rax);
  }

  // Clear the context pointer.
  Set(rsi, 0);

  // Restore registers from handler.
  STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
                StackHandlerConstants::kFPOffset);
  pop(rbp);  // FP
  STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
                StackHandlerConstants::kStateOffset);
  pop(rdx);  // State

  STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
                StackHandlerConstants::kPCOffset);
  ret(0);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    addq(rsp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(not_equal, fail);
}


void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  testl(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  decb(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
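
// Editor's sketch of the branch-light clamp: a value is in [0, 255] iff no
// bit above the low byte is set.  Out of range, setcc(negative, reg) turns
// the sign of the tested value into 1 (negative) or 0 (too large), and the
// byte decrement wraps those to the two saturation values:
//
//   reg == -7   ->  setcc 1  ->  decb 0x00  (clamped to 0)
//   reg == 300  ->  setcc 0  ->  decb 0xFF  (clamped to 255)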


void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister temp_xmm_reg,
                                        Register result_reg,
                                        Register temp_reg) {
  Label done;
  Set(result_reg, 0);
  xorps(temp_xmm_reg, temp_xmm_reg);
  ucomisd(input_reg, temp_xmm_reg);
  j(below, &done, Label::kNear);
  uint64_t one_half = BitCast<uint64_t, double>(0.5);
  Set(temp_reg, one_half);
  movq(temp_xmm_reg, temp_reg);
  addsd(temp_xmm_reg, input_reg);
  cvttsd2si(result_reg, temp_xmm_reg);
  testl(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  Set(result_reg, 255);
  bind(&done);
}
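
// Editor's note: ucomisd against 0.0 sets the carry flag both for inputs
// below zero and for NaN (unordered compares set ZF, PF and CF), so
// j(below, ...) keeps the preloaded 0 in result_reg for both.  Remaining
// inputs are rounded by adding 0.5 and truncating with cvttsd2si
// (round-half-up, e.g. 2.5 -> 3), and any result with bits above the low
// byte is clamped to 255.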


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movq(descriptors, FieldOperand(map,
                                 Map::kInstanceDescriptorsOrBitField3Offset));
  Label not_smi;
  JumpIfNotSmi(descriptors, &not_smi, Label::kNear);
  Move(descriptors, isolate()->factory()->empty_descriptor_array());
  bind(&not_smi);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(equal, success, RelocInfo::CODE_TARGET);

  bind(&fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok, Label::kNear);
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(NegateCondition(is_smi), "Operand is a smi");
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotSmi(const Operand& object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotString(Register object) {
  testb(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  movq(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}


void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  ASSERT(!src.is(kScratchRegister));
  LoadRoot(kScratchRegister, root_value_index);
  cmpq(src, kScratchRegister);
  Check(equal, message);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
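
// Editor's note: the instance-type space is laid out so that a single bit
// (kIsNotStringMask) separates strings from non-strings, which is why this
// helper can hand back the `zero` condition for the caller to branch on,
// e.g. (a hypothetical call site):
//
//   Condition is_string = IsObjectStringType(rax, rbx, rcx);
//   j(NegateCondition(is_string), &not_a_string);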
2690
2691
Steve Blocka7e24c12009-10-30 11:49:00 +00002692void MacroAssembler::TryGetFunctionPrototype(Register function,
2693 Register result,
2694 Label* miss) {
2695 // Check that the receiver isn't a smi.
2696 testl(function, Immediate(kSmiTagMask));
2697 j(zero, miss);
2698
2699 // Check that the function really is a function.
2700 CmpObjectType(function, JS_FUNCTION_TYPE, result);
2701 j(not_equal, miss);
2702
2703 // Make sure that the function has an instance prototype.
Ben Murdoch257744e2011-11-30 15:57:28 +00002704 Label non_instance;
Steve Blocka7e24c12009-10-30 11:49:00 +00002705 testb(FieldOperand(result, Map::kBitFieldOffset),
2706 Immediate(1 << Map::kHasNonInstancePrototype));
Ben Murdoch257744e2011-11-30 15:57:28 +00002707 j(not_zero, &non_instance, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002708
2709 // Get the prototype or initial map from the function.
2710 movq(result,
2711 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2712
2713 // If the prototype or initial map is the hole, don't return it and
2714 // simply miss the cache instead. This will allow us to allocate a
2715 // prototype object on-demand in the runtime system.
2716 CompareRoot(result, Heap::kTheHoleValueRootIndex);
2717 j(equal, miss);
2718
2719 // If the function does not have an initial map, we're done.
Ben Murdoch257744e2011-11-30 15:57:28 +00002720 Label done;
Steve Blocka7e24c12009-10-30 11:49:00 +00002721 CmpObjectType(result, MAP_TYPE, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002722 j(not_equal, &done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002723
2724 // Get the prototype from the initial map.
2725 movq(result, FieldOperand(result, Map::kPrototypeOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002726 jmp(&done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002727
2728 // Non-instance prototype: Fetch prototype from constructor field
2729 // in initial map.
2730 bind(&non_instance);
2731 movq(result, FieldOperand(result, Map::kConstructorOffset));
2732
2733 // All done.
2734 bind(&done);
2735}
2736
2737
2738void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2739 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002740 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Ben Murdoch8b112d22011-06-08 16:22:53 +01002741 movl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002742 }
2743}
2744
2745
2746void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2747 ASSERT(value > 0);
2748 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002749 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00002750 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01002751 incl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002752 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002753 addl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002754 }
2755 }
2756}
2757
2758
2759void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2760 ASSERT(value > 0);
2761 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002762 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00002763 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01002764 decl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002765 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002766 subl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002767 }
2768 }
2769}
2770
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002771
Steve Blocka7e24c12009-10-30 11:49:00 +00002772#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +00002773void MacroAssembler::DebugBreak() {
2774 ASSERT(allow_stub_calls());
Steve Block9fac8402011-05-12 15:51:54 +01002775 Set(rax, 0); // No arguments.
Steve Block44f0eee2011-05-26 01:26:41 +01002776 LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
Andrei Popescu402d9372010-02-26 13:31:12 +00002777 CEntryStub ces(1);
2778 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Steve Blocka7e24c12009-10-30 11:49:00 +00002779}
Andrei Popescu402d9372010-02-26 13:31:12 +00002780#endif // ENABLE_DEBUGGER_SUPPORT


void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // This macro takes the dst register to make the code more readable
  // at the call sites. However, the dst register has to be rcx to
  // follow the calling convention, which requires the call kind to be
  // in rcx.
  ASSERT(dst.is(rcx));
  if (call_kind == CALL_AS_FUNCTION) {
    LoadSmiConstant(dst, Smi::FromInt(1));
  } else {
    LoadSmiConstant(dst, Smi::FromInt(0));
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  InvokePrologue(expected,
                 actual,
                 Handle<Code>::null(),
                 code,
                 &done,
                 flag,
                 Label::kNear,
                 call_wrapper,
                 call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code));
    SetCallKind(rcx, call_kind);
    call(code);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(rcx, call_kind);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  Register dummy = rax;
  InvokePrologue(expected,
                 actual,
                 code,
                 dummy,
                 &done,
                 flag,
                 Label::kNear,
                 call_wrapper,
                 call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code));
    SetCallKind(rcx, call_kind);
    Call(code, rmode);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(rcx, call_kind);
    Jump(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  // Load the code entry point, which lies past the Code object header,
  // at the start of the executable code.
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  ASSERT(function->is_compiled());
  // Get the function and set up the context.
  Move(rdi, Handle<JSFunction>(function));
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  if (V8::UseCrankshaft()) {
    // Since Crankshaft can recompile a function, we need to load
    // the Code object every time we call the function.
    movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
    ParameterCount expected(function->shared()->formal_parameter_count());
    InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
  } else {
    // Invoke the cached code.
    Handle<Code> code(function->code());
    ParameterCount expected(function->shared()->formal_parameter_count());
    InvokeCode(code,
               expected,
               actual,
               RelocInfo::CODE_TARGET,
               flag,
               call_wrapper,
               call_kind);
  }
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    Label* done,
                                    InvokeFlag flag,
                                    Label::Distance near_jump,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip the adaptation code by making it
        // look like we have a match between the expected and actual
        // number of arguments.
        definitely_matches = true;
      } else {
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      ASSERT(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      SetCallKind(rcx, call_kind);
      Call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      jmp(done, near_jump);
    } else {
      SetCallKind(rcx, call_kind);
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
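
// Illustrative example (not part of the original source): for a call site
// with expected.immediate() == 2 and actual.immediate() == 3, the prologue
// sets rax = 3 and rbx = 2 and transfers control to the
// ArgumentsAdaptorTrampoline, which fixes up the argument count before
// entering the function. When the counts match, or when expected is
// SharedFunctionInfo::kDontAdaptArgumentsSentinel, definitely_matches is
// true and the adaptor is bypassed entirely.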


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (emit_debug_code()) {
    movq(kScratchRegister,
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movq(r14, rax);  // Back up rax in callee-save register.
  }

  Store(ExternalReference(Isolate::k_c_entry_fp_address, isolate()), rbp);
  Store(ExternalReference(Isolate::k_context_address, isolate()), rsi);
}
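
// Resulting exit frame layout, derived from the asserts and pushes above
// (offsets relative to the new rbp):
//   rbp + 2 * kPointerSize : last caller argument (kCallerSPDisplacement)
//   rbp + 1 * kPointerSize : return address (kCallerPCOffset)
//   rbp + 0 * kPointerSize : saved caller rbp (kCallerFPOffset)
//   rbp - 1 * kPointerSize : saved entry sp slot, patched later (kSPOffset)
//   rbp - 2 * kPointerSize : code object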


void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kNumRegisters * kDoubleSize +
        arg_stack_space * kPointerSize;
    subq(rsp, Immediate(space));
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subq(rsp, Immediate(arg_stack_space * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    ASSERT(is_int8(kFrameAlignment));
    and_(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
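
// When save_doubles is set, the loop above spills the allocatable XMM
// registers just below the code-object slot: register i is stored at
// rbp - 2 * kPointerSize - (i + 1) * kDoubleSize, with arg_stack_space
// (plus the Win64 shadow space) reserved below the spilled doubles.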


void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }
  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Drop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r15, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(rcx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveApiExitFrame() {
  movq(rsp, rbp);
  pop(rbp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::k_context_address, isolate());
  Operand context_operand = ExternalOperand(context_address);
  movq(rsi, context_operand);
#ifdef DEBUG
  movq(context_operand, Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
                                       isolate());
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movq(c_entry_fp_operand, Immediate(0));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (emit_debug_code()) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check that the context is a global context.
  if (emit_debug_code()) {
    // Preserve original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    Operand top_operand = ExternalOperand(new_space_allocation_top);
    cmpq(result, top_operand);
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    LoadAddress(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else {
    Load(result, new_space_allocation_top);
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (emit_debug_code()) {
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Update new top.
  if (scratch.is_valid()) {
    // Scratch already contains address of allocation top.
    movq(Operand(scratch, 0), result_end);
  } else {
    Store(new_space_allocation_top, result_end);
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    movq(top_reg, result);
  }
  addq(top_reg, Immediate(object_size));
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(top_reg, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      subq(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subq(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    // Tag the result if requested.
    addq(result, Immediate(kHeapObjectTag));
  }
}
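
// In effect this is bump-pointer allocation: result receives the old
// new-space top, the top pointer is advanced by object_size, and control
// transfers to gc_required on overflow or when the limit is exceeded.
// AllocateHeapNumber below, for example, allocates HeapNumber::kSize bytes
// this way and requests a tagged pointer via TAG_OBJECT.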


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count * element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  addq(result_end, result);
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(result_end, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  }
  addq(result_end, result);
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(result_end, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
  cmpq(object, top_operand);
  Check(below, "Undo allocation of non allocated memory");
#endif
  movq(top_operand, object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
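
// The size computation above, spelled out (illustrative numbers, not from
// the original source): scratch1 ends up as length * 2 rounded up so that
// SeqTwoByteString::kHeaderSize + scratch1 is kObjectAlignment-aligned.
// With 8-byte alignment and an already-aligned header, a 3-character
// string needs 6 payload bytes, rounded to 8, so the object occupies
// SeqTwoByteString::kHeaderSize + 8 bytes of new space.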


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ASCII string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate a cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate an ASCII cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. Destination is incremented by length; source,
// length, and scratch are clobbered.
// A simpler loop is faster on small copies, but slower on large ones.
// A cld() instruction must have been emitted, to clear the direction flag,
// before calling this function.
void MacroAssembler::CopyBytes(Register destination,
                               Register source,
                               Register length,
                               int min_length,
                               Register scratch) {
  ASSERT(min_length >= 0);
  if (FLAG_debug_code) {
    cmpl(length, Immediate(min_length));
    Assert(greater_equal, "Invalid min_length");
  }
  Label loop, done, short_string, short_loop;

  const int kLongStringLimit = 20;
  if (min_length <= kLongStringLimit) {
    cmpl(length, Immediate(kLongStringLimit));
    j(less_equal, &short_string);
  }

  ASSERT(source.is(rsi));
  ASSERT(destination.is(rdi));
  ASSERT(length.is(rcx));

  // Because source is 8-byte aligned in our uses of this function,
  // we keep source aligned for the rep movs operation by copying the odd bytes
  // at the end of the ranges.
  movq(scratch, length);
  shrl(length, Immediate(3));
  repmovsq();
  // Move remaining bytes of length.
  andl(scratch, Immediate(0x7));
  movq(length, Operand(source, scratch, times_1, -8));
  movq(Operand(destination, scratch, times_1, -8), length);
  addq(destination, scratch);

  if (min_length <= kLongStringLimit) {
    jmp(&done);

    bind(&short_string);
    if (min_length == 0) {
      testl(length, length);
      j(zero, &done);
    }
    lea(scratch, Operand(destination, length, times_1, 0));

    bind(&short_loop);
    movb(length, Operand(source, 0));
    movb(Operand(destination, 0), length);
    incq(source);
    incq(destination);
    cmpq(destination, scratch);
    j(not_equal, &short_loop);

    bind(&done);
  }
}
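
// Worked example of the long path (illustrative, not from the original
// source): for length == 27, rep movsq copies three quadwords (24 bytes),
// scratch & 7 == 3, and the trailing quadword move copies bytes 19..26 of
// the range in one go -- re-copying five bytes that rep movsq already
// moved. The overlap is harmless for the non-overlapping buffers this
// helper is used on, and it keeps the tail copy branch-free.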


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in rsi).
    movq(dst, rsi);
  }

  // We should not have found a 'with' context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
    Check(equal, "Yo dawg, I heard you liked function contexts "
                 "so I put function contexts in all your contexts");
  }
}
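
// Illustrative walk (not part of the original source): with
// context_chain_length == 2, dst follows two CLOSURE links -- one before
// the loop and one loop iteration -- and then loads that context's
// FCONTEXT slot, so dst always ends up holding a function context.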

#ifdef _WIN64
static const int kRegisterPassedArguments = 4;
#else
static const int kRegisterPassedArguments = 6;
#endif

void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  movq(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64 stack slots are reserved by the caller for all arguments
  // including the ones passed in registers, and space is always allocated for
  // the four register arguments even if the function takes fewer than four
  // arguments.
  // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
  // and the caller does not reserve stack slots for them.
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  const int kMinimumStackSlots = kRegisterPassedArguments;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}
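
// Illustrative values (not in the original source): num_arguments == 3
// yields 4 on Win64 (shadow space for the four register arguments) and 0 on
// the AMD64 ABI; num_arguments == 7 yields 7 on Win64 and 1 on AMD64 (six
// arguments travel in registers, one on the stack).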


void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}
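
// Hedged usage sketch (not from this file; the argument names below are
// placeholders, not identifiers defined here): a C call is bracketed by
// PrepareCallCFunction and CallCFunction, with arguments placed in the
// native ABI argument registers (rdi, rsi, ... on AMD64; rcx, rdx, ... on
// Win64) in between:
//   PrepareCallCFunction(2);
//   movq(rdi, first_argument);   // first_argument is hypothetical
//   movq(rsi, second_argument);  // second_argument is hypothetical
//   CallCFunction(ExternalReference(...), 2);
// CallCFunction below restores the pre-aligned rsp from the slot that
// PrepareCallCFunction saved it in.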


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64