// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}

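// Computes the signed distance from the value kept in kRootRegister (the
// address of the isolate's root array plus kRootRegisterBias) to an
// external reference.  When this delta fits in 32 bits, the reference can
// be addressed relative to kRootRegister instead of materializing a full
// 64-bit address, which saves code space.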
static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate->heap()->roots_address());
  intptr_t delta = other.address() - roots_register_value;
  return delta;
}

Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(target, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  movq(scratch, target);
  return Operand(scratch, 0);
}


void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  } else {
    movq(kScratchRegister, source);
    movq(destination, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(destination, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  } else {
    movq(kScratchRegister, destination);
    movq(Operand(kScratchRegister, 0), source);
  }
}


void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  movq(destination, source);
}

int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      // Operand is lea(scratch, Operand(kRootRegister, delta));
      // Opcodes: REX.W 8D ModRM Disp8/Disp32 - 4 or 7 bytes.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need the full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movq(destination, src);
  return 10;
}

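// The root-list accessors below address the roots array directly through
// kRootRegister.  Since the register holds the array's base address plus
// kRootRegisterBias, every displacement subtracts the bias back out.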
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  ASSERT(root_array_available_);
  movq(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  cmpq(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  ASSERT(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}

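// Write-barrier helper.  Each page is divided into fixed-size regions
// (see Page::kRegionSizeLog2), each guarded by one bit of the page's
// dirty-mark word; recording a write only requires setting the bit for
// the region containing the stored-into address, and the collector later
// rescans dirty regions for pointers into new space.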
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (emit_debug_code()) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space, Label::kNear);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr.  See the
  // Page::GetRegionNumberForAddress method for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));

  // Set the dirty mark for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}

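// New space is a single contiguous, size-aligned block of memory, so
// membership reduces to a mask test: an address is in new space exactly
// when (addr - new_space_start) & HEAP->NewSpaceMask() is zero.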
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance near_jump) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address.  We load it as an external reference
    // in case the size of the new space is different between the snapshot
    // maker and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask(isolate()));
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask(isolate()));
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start(isolate()));
    cmpq(scratch, kScratchRegister);
    j(cc, branch, near_jump);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
    j(cc, branch, near_jump);
  }
}

void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed.  The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.  This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed.  The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (emit_debug_code()) {
    Label okay;
    JumpIfNotSmi(object, &okay, Label::kNear);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be an untagged int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space.  We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if the stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok, Label::kNear);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}

void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly.  Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}

void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
         RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
        RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}

void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key.  Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later.  However, as the new key is the numeric value of a string
  // key there is no difference in using either key.
  Integer32ToSmi(index, hash);
}

void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(rax, function->nargs);
  LoadAddress(rbx, ExternalReference(function, isolate()));
  CEntryStub ces(1);
  ces.SaveDoubles();
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return HEAP->undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}

void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  return TryJumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(ExternalReference(fid, isolate()),
                                      num_arguments,
                                      result_size);
}

static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#ifdef _WIN64
  // We need to prepare a slot for the result handle on the stack and put
  // a pointer to it into the first argument register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}

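// Calls an API function directly rather than via the CEntry stub, doing
// the HandleScope bookkeeping inline: the scope's next/limit/level fields
// are saved in callee-saved registers around the call and restored
// afterwards, presumably so no v8::HandleScope has to be set up in C++.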
MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
    ApiFunction* function, int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Factory* factory = isolate()->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax,
       reinterpret_cast<int64_t>(function->address()),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#ifdef _WIN64
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero.  Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one).  Restore
  // the previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
                                           0, 1);
  if (result->IsFailure()) {
    return result;
  }

  bind(&empty_result);
  // It was zero; the result is undefined.
  Move(rax, factory->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed.  Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
#ifdef _WIN64
  LoadAddress(rcx, ExternalReference::isolate_address());
#else
  LoadAddress(rdi, ExternalReference::isolate_address());
#endif
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);

  return result;
}

void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext, int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  return TryTailCallStub(&ces);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments.  Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}

void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into the target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}

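// Set picks the shortest encoding for the immediate: xorl for zero (a
// 32-bit operation also clears the upper half of the register), a
// zero-extending movl when the value fits in an unsigned 32 bits, a
// sign-extending movq with a 32-bit immediate when it fits in a signed
// 32 bits, and a full 10-byte movq with a 64-bit immediate otherwise.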
void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    Set(kScratchRegister, x);
    movq(dst, kScratchRegister);
  }
}


// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

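// On x64 a smi keeps its 32-bit payload in the upper half of the word:
// tagging shifts the value left by kSmiShift (32 bits) and untagging
// shifts it back, so e.g. Smi::FromInt(5) has the bit pattern
// 0x0000000500000000 and the low 32 bits of every smi are zero
// (kSmiTag == 0).  This is why several helpers below can read or write
// just the upper half of a smi field with a movl at a 4-byte offset.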
Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

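// kSmiConstantRegister permanently holds Smi::FromInt(1).  Small smi
// constants can therefore be built with a single lea that uses the
// register as base and/or scaled index (e.g. Smi::FromInt(5) is
// kSmiConstantRegister + 4 * kSmiConstantRegister), avoiding a 10-byte
// movq of a 64-bit immediate.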
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      Label ok;
      j(equal, &ok, Label::kNear);
      int3();
      bind(&ok);
    }
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok, Label::kNear);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  if (emit_debug_code()) {
    AbortIfNotSmi(smi1);
    AbortIfNotSmi(smi2);
  }
  cmpq(smi1, smi2);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  Cmp(dst, src);
}


void MacroAssembler::Cmp(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  ASSERT(!dst.AddressUsesRegister(smi_reg));
  cmpq(dst, smi_reg);
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}

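// Untagging (an arithmetic shift right by kSmiShift) and multiplying by
// 2^power (a shift left by power) fold into a single shift:
// value * 2^power == smi >> (kSmiShift - power) whenever power < kSmiShift.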
void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label* on_not_smis,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
    ASSERT(!src1.is(kScratchRegister));
    ASSERT(!src2.is(kScratchRegister));
    movq(kScratchRegister, src1);
    or_(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    or_(dst, src2);
    JumpIfNotSmi(dst, on_not_smis, near_jump);
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckSmi(const Operand& src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}

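// A value is a non-negative smi iff both the sign bit (bit 63) and the
// smi tag bit (bit 0) are clear.  Rotating left by one moves the sign bit
// into bit 0 and the tag bit into bit 1, so a single testb against 3
// checks both bits at once.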
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}

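// Smis have both low bits clear while heap object pointers end in the
// bits 01 (kHeapObjectTag), so the low two bits of first + second are
// nonzero unless both values are smis: smi + heap object ends in 01, and
// heap object + heap object ends in 10.  One leal plus a testb therefore
// checks both registers together.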
Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}


void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            Label* on_invalid,
                                            Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid,
                                                Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfSmi(Register src,
                               Label* on_smi,
                               Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(smi, on_smi, near_jump);
}


void MacroAssembler::JumpIfNotSmi(Register src,
                                  Label* on_not_smi,
                                  Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi, near_jump);
}


void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, Label* on_not_smi_or_negative,
    Label::Distance near_jump) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Register src2,
                                      Label* on_not_both_smi,
                                      Label::Distance near_jump) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Register src2,
                                                  Label* on_not_both_smi,
                                                  Label::Distance near_jump) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result,
                                       Label::Distance near_jump) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result, near_jump);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result, near_jump);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}

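// Adding a smi constant of 1, 2, 4 or 8 below reuses the
// kSmiConstantRegister trick from LoadSmiConstant: the result is
// src + scale * kSmiConstantRegister for the scale factors that lea
// addressing modes support directly.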
Steve Block3ce2e202009-11-05 08:53:23 +00001312void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
1313 if (constant->value() == 0) {
1314 if (!dst.is(src)) {
1315 movq(dst, src);
1316 }
Steve Block8defd9f2010-07-08 12:39:36 +01001317 return;
Steve Block3ce2e202009-11-05 08:53:23 +00001318 } else if (dst.is(src)) {
1319 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001320 switch (constant->value()) {
1321 case 1:
1322 addq(dst, kSmiConstantRegister);
1323 return;
1324 case 2:
1325 lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1326 return;
1327 case 4:
1328 lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1329 return;
1330 case 8:
1331 lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1332 return;
1333 default:
1334 Register constant_reg = GetSmiConstant(constant);
1335 addq(dst, constant_reg);
1336 return;
1337 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001338 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001339 switch (constant->value()) {
1340 case 1:
1341 lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
1342 return;
1343 case 2:
1344 lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1345 return;
1346 case 4:
1347 lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1348 return;
1349 case 8:
1350 lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1351 return;
1352 default:
1353 LoadSmiConstant(dst, constant);
1354 addq(dst, src);
1355 return;
1356 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001357 }
1358}
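
// A sketch of the lea fast paths above, assuming kSmiConstantRegister holds
// Smi::FromInt(1), as the times_2/times_4/times_8 scalings imply:
//
//   lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
//   // dst = src + 4 * Smi::FromInt(1) == src + Smi::FromInt(4)
//
// so adding the smi constants 1, 2, 4 and 8 needs no extra constant load.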
1359
1360
Leon Clarkef7060e22010-06-03 12:02:55 +01001361void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
1362 if (constant->value() != 0) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001363 addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
Leon Clarkef7060e22010-06-03 12:02:55 +01001364 }
1365}
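
// A worked example of the in-memory add above, assuming the 64-bit smi
// layout (value in the upper 32 bits, kSmiShift == 32, low bits zero):
// kSmiShift / kBitsPerByte == 4, so for memory holding
// Smi::FromInt(7) == 0x0000000700000000, adding Immediate(3) to the dword
// at byte offset 4 yields 0x0000000A00000000 == Smi::FromInt(10). Only the
// value half is touched, and no overflow check is performed.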
1366
1367
Ben Murdoch257744e2011-11-30 15:57:28 +00001368void MacroAssembler::SmiAddConstant(Register dst,
1369 Register src,
1370 Smi* constant,
1371 Label* on_not_smi_result,
1372 Label::Distance near_jump) {
1373 if (constant->value() == 0) {
1374 if (!dst.is(src)) {
1375 movq(dst, src);
1376 }
1377 } else if (dst.is(src)) {
1378 ASSERT(!dst.is(kScratchRegister));
1379
1380 LoadSmiConstant(kScratchRegister, constant);
1381 addq(kScratchRegister, src);
1382 j(overflow, on_not_smi_result, near_jump);
1383 movq(dst, kScratchRegister);
1384 } else {
1385 LoadSmiConstant(dst, constant);
1386 addq(dst, src);
1387 j(overflow, on_not_smi_result, near_jump);
1388 }
1389}
1390
1391
Steve Block3ce2e202009-11-05 08:53:23 +00001392void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
1393 if (constant->value() == 0) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001394 if (!dst.is(src)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001395 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001396 }
Steve Block3ce2e202009-11-05 08:53:23 +00001397 } else if (dst.is(src)) {
1398 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001399 Register constant_reg = GetSmiConstant(constant);
1400 subq(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001401 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001402 if (constant->value() == Smi::kMinValue) {
Steve Block8defd9f2010-07-08 12:39:36 +01001403 LoadSmiConstant(dst, constant);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001404 // Adding and subtracting the min-value gives the same result, it only
1405 // differs on the overflow bit, which we don't check here.
1406 addq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001407 } else {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001408 // Subtract by adding the negation.
Steve Block8defd9f2010-07-08 12:39:36 +01001409 LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
Steve Block3ce2e202009-11-05 08:53:23 +00001410 addq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001411 }
1412 }
1413}
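
// A worked example of the Smi::kMinValue case above, assuming 32-bit smi
// values in the upper word: the 64-bit pattern of Smi::FromInt(kMinValue)
// is 0x8000000000000000, which is its own two's-complement negation, so
//
//   LoadSmiConstant(dst, constant);  // constant == Smi::FromInt(kMinValue)
//   addq(dst, src);                  // src + kMinValue == src - kMinValue
//
// modulo 2^64; only the overflow bit differs, and it is not checked here.
// Negating the constant first, as the generic path does, would overflow
// the smi range.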
1414
1415
Ben Murdoch257744e2011-11-30 15:57:28 +00001416void MacroAssembler::SmiSubConstant(Register dst,
1417 Register src,
1418 Smi* constant,
1419 Label* on_not_smi_result,
1420 Label::Distance near_jump) {
1421 if (constant->value() == 0) {
1422 if (!dst.is(src)) {
1423 movq(dst, src);
1424 }
1425 } else if (dst.is(src)) {
1426 ASSERT(!dst.is(kScratchRegister));
1427 if (constant->value() == Smi::kMinValue) {
1428 // Subtracting min-value from any non-negative value will overflow.
1429 // We test the non-negativeness before doing the subtraction.
1430 testq(src, src);
1431 j(not_sign, on_not_smi_result, near_jump);
1432 LoadSmiConstant(kScratchRegister, constant);
1433 subq(dst, kScratchRegister);
1434 } else {
1435 // Subtract by adding the negation.
1436 LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
1437 addq(kScratchRegister, dst);
1438 j(overflow, on_not_smi_result, near_jump);
1439 movq(dst, kScratchRegister);
1440 }
1441 } else {
1442 if (constant->value() == Smi::kMinValue) {
1443 // Subtracting min-value from any non-negative value will overflow.
1444 // We test the non-negativeness before doing the subtraction.
1445 testq(src, src);
1446 j(not_sign, on_not_smi_result, near_jump);
1447 LoadSmiConstant(dst, constant);
1448 // Adding and subtracting the min-value gives the same result, it only
1449 // differs on the overflow bit, which we don't check here.
1450 addq(dst, src);
1451 } else {
1452 // Subtract by adding the negation.
1453 LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
1454 addq(dst, src);
1455 j(overflow, on_not_smi_result, near_jump);
1456 }
1457 }
1458}
1459
1460
1461void MacroAssembler::SmiNeg(Register dst,
1462 Register src,
1463 Label* on_smi_result,
1464 Label::Distance near_jump) {
1465 if (dst.is(src)) {
1466 ASSERT(!dst.is(kScratchRegister));
1467 movq(kScratchRegister, src);
1468 neg(dst); // Low 32 bits are retained as zero by negation.
1469 // Test if result is zero or Smi::kMinValue.
1470 cmpq(dst, kScratchRegister);
1471 j(not_equal, on_smi_result, near_jump);
1472 movq(src, kScratchRegister);
1473 } else {
1474 movq(dst, src);
1475 neg(dst);
1476 cmpq(dst, src);
1477 // If the result is zero or Smi::kMinValue, negation failed to create a smi.
1478 j(not_equal, on_smi_result, near_jump);
1479 }
1480}
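
// The not_equal test above works because neg has exactly two fixed points
// among valid smis: Smi::FromInt(0), whose negation would be -0 (a heap
// number, not a smi), and Smi::FromInt(Smi::kMinValue), whose negation
// overflows the smi range. For example:
//
//   src == Smi::FromInt(5):  neg gives Smi::FromInt(-5) != src
//                            -> jump to on_smi_result.
//   src == Smi::FromInt(0):  neg gives 0 == src -> fall through (slow case).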
1481
1482
1483void MacroAssembler::SmiAdd(Register dst,
1484 Register src1,
1485 Register src2,
1486 Label* on_not_smi_result,
1487 Label::Distance near_jump) {
1488 ASSERT_NOT_NULL(on_not_smi_result);
1489 ASSERT(!dst.is(src2));
1490 if (dst.is(src1)) {
1491 movq(kScratchRegister, src1);
1492 addq(kScratchRegister, src2);
1493 j(overflow, on_not_smi_result, near_jump);
1494 movq(dst, kScratchRegister);
1495 } else {
1496 movq(dst, src1);
1497 addq(dst, src2);
1498 j(overflow, on_not_smi_result, near_jump);
1499 }
1500}
1501
1502
1503void MacroAssembler::SmiAdd(Register dst,
1504 Register src1,
1505 const Operand& src2,
1506 Label* on_not_smi_result,
1507 Label::Distance near_jump) {
1508 ASSERT_NOT_NULL(on_not_smi_result);
1509 if (dst.is(src1)) {
1510 movq(kScratchRegister, src1);
1511 addq(kScratchRegister, src2);
1512 j(overflow, on_not_smi_result, near_jump);
1513 movq(dst, kScratchRegister);
1514 } else {
1515 ASSERT(!src2.AddressUsesRegister(dst));
1516 movq(dst, src1);
1517 addq(dst, src2);
1518 j(overflow, on_not_smi_result, near_jump);
1519 }
1520}
1521
1522
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001523void MacroAssembler::SmiAdd(Register dst,
1524 Register src1,
1525 Register src2) {
1526 // No overflow checking. Use only when it's known that
1527 // overflowing is impossible.
Steve Block44f0eee2011-05-26 01:26:41 +01001528 if (!dst.is(src1)) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001529 if (emit_debug_code()) {
1530 movq(kScratchRegister, src1);
1531 addq(kScratchRegister, src2);
1532 Check(no_overflow, "Smi addition overflow");
1533 }
1534 lea(dst, Operand(src1, src2, times_1, 0));
1535 } else {
1536 addq(dst, src2);
1537 Assert(no_overflow, "Smi addition overflow");
Steve Blocka7e24c12009-10-30 11:49:00 +00001538 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001539}
1540
1541
1542void MacroAssembler::SmiSub(Register dst,
1543 Register src1,
1544 Register src2,
1545 Label* on_not_smi_result,
1546 Label::Distance near_jump) {
1547 ASSERT_NOT_NULL(on_not_smi_result);
1548 ASSERT(!dst.is(src2));
1549 if (dst.is(src1)) {
1550 cmpq(dst, src2);
1551 j(overflow, on_not_smi_result, near_jump);
1552 subq(dst, src2);
1553 } else {
1554 movq(dst, src1);
1555 subq(dst, src2);
1556 j(overflow, on_not_smi_result, near_jump);
1557 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001558}
1559
1560
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001561void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
1562 // No overflow checking. Use only when it's known that
1563 // overflowing is impossible (e.g., subtracting two positive smis).
1564 ASSERT(!dst.is(src2));
Steve Block44f0eee2011-05-26 01:26:41 +01001565 if (!dst.is(src1)) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001566 movq(dst, src1);
Steve Block3ce2e202009-11-05 08:53:23 +00001567 }
Steve Block44f0eee2011-05-26 01:26:41 +01001568 subq(dst, src2);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001569 Assert(no_overflow, "Smi subtraction overflow");
Steve Blocka7e24c12009-10-30 11:49:00 +00001570}
1571
1572
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001573void MacroAssembler::SmiSub(Register dst,
Steve Blocka7e24c12009-10-30 11:49:00 +00001574 Register src1,
Ben Murdoch257744e2011-11-30 15:57:28 +00001575 const Operand& src2,
1576 Label* on_not_smi_result,
1577 Label::Distance near_jump) {
1578 ASSERT_NOT_NULL(on_not_smi_result);
1579 if (dst.is(src1)) {
1580 movq(kScratchRegister, src2);
1581 cmpq(src1, kScratchRegister);
1582 j(overflow, on_not_smi_result, near_jump);
1583 subq(src1, kScratchRegister);
1584 } else {
1585 movq(dst, src1);
1586 subq(dst, src2);
1587 j(overflow, on_not_smi_result, near_jump);
1588 }
1589}
1590
1591
1592void MacroAssembler::SmiSub(Register dst,
1593 Register src1,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001594 const Operand& src2) {
1595 // No overflow checking. Use only when it's known that
1596 // overflowing is impossible (e.g., subtracting two positive smis).
Steve Block44f0eee2011-05-26 01:26:41 +01001597 if (!dst.is(src1)) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001598 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001599 }
Steve Block44f0eee2011-05-26 01:26:41 +01001600 subq(dst, src2);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001601 Assert(no_overflow, "Smi subtraction overflow");
Steve Blocka7e24c12009-10-30 11:49:00 +00001602}
1603
1604
Ben Murdoch257744e2011-11-30 15:57:28 +00001605void MacroAssembler::SmiMul(Register dst,
1606 Register src1,
1607 Register src2,
1608 Label* on_not_smi_result,
1609 Label::Distance near_jump) {
1610 ASSERT(!dst.is(src2));
1611 ASSERT(!dst.is(kScratchRegister));
1612 ASSERT(!src1.is(kScratchRegister));
1613 ASSERT(!src2.is(kScratchRegister));
1614
1615 if (dst.is(src1)) {
1616 Label failure, zero_correct_result;
1617 movq(kScratchRegister, src1); // Create backup for later testing.
1618 SmiToInteger64(dst, src1);
1619 imul(dst, src2);
1620 j(overflow, &failure, Label::kNear);
1621
1622 // Check for negative zero result. If product is zero, and one
1623 // argument is negative, go to slow case.
1624 Label correct_result;
1625 testq(dst, dst);
1626 j(not_zero, &correct_result, Label::kNear);
1627
1628 movq(dst, kScratchRegister);
1629 xor_(dst, src2);
1630 // Result was positive zero.
1631 j(positive, &zero_correct_result, Label::kNear);
1632
1633 bind(&failure); // Reused failure exit, restores src1.
1634 movq(src1, kScratchRegister);
1635 jmp(on_not_smi_result, near_jump);
1636
1637 bind(&zero_correct_result);
1638 Set(dst, 0);
1639
1640 bind(&correct_result);
1641 } else {
1642 SmiToInteger64(dst, src1);
1643 imul(dst, src2);
1644 j(overflow, on_not_smi_result, near_jump);
1645 // Check for negative zero result. If product is zero, and one
1646 // argument is negative, go to slow case.
1647 Label correct_result;
1648 testq(dst, dst);
1649 j(not_zero, &correct_result, Label::kNear);
1650 // One of src1 and src2 is zero; check whether the other one is
1651 // negative.
1652 movq(kScratchRegister, src1);
1653 xor_(kScratchRegister, src2);
1654 j(negative, on_not_smi_result, near_jump);
1655 bind(&correct_result);
1656 }
1657}
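
// A worked example of the negative-zero check above: for src1 == Smi(0)
// and src2 == Smi(-3) the integer product is 0, but the JavaScript result
// must be the heap number -0. The operand xor,
//
//   0x0000000000000000 ^ 0xFFFFFFFD00000000   // Smi(0) ^ Smi(-3)
//
// has the sign bit set, so control reaches on_not_smi_result; Smi(0) times
// a non-negative smi xors to a non-negative value and yields the smi 0.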
1658
1659
1660void MacroAssembler::SmiDiv(Register dst,
1661 Register src1,
1662 Register src2,
1663 Label* on_not_smi_result,
1664 Label::Distance near_jump) {
1665 ASSERT(!src1.is(kScratchRegister));
1666 ASSERT(!src2.is(kScratchRegister));
1667 ASSERT(!dst.is(kScratchRegister));
1668 ASSERT(!src2.is(rax));
1669 ASSERT(!src2.is(rdx));
1670 ASSERT(!src1.is(rdx));
1671
1672 // Check for 0 divisor (result is +/-Infinity).
1673 testq(src2, src2);
1674 j(zero, on_not_smi_result, near_jump);
1675
1676 if (src1.is(rax)) {
1677 movq(kScratchRegister, src1);
1678 }
1679 SmiToInteger32(rax, src1);
1680 // We need to rule out dividing Smi::kMinValue by -1, since that would
1681 // overflow in idiv and raise an exception.
1682 // We combine this with the negative zero test (negative zero only happens
1683 // when dividing zero by a negative number).
1684
1685 // We overshoot a little and go to slow case if we divide min-value
1686 // by any negative value, not just -1.
1687 Label safe_div;
1688 testl(rax, Immediate(0x7fffffff));
1689 j(not_zero, &safe_div, Label::kNear);
1690 testq(src2, src2);
1691 if (src1.is(rax)) {
1692 j(positive, &safe_div, Label::kNear);
1693 movq(src1, kScratchRegister);
1694 jmp(on_not_smi_result, near_jump);
1695 } else {
1696 j(negative, on_not_smi_result, near_jump);
1697 }
1698 bind(&safe_div);
1699
1700 SmiToInteger32(src2, src2);
1701 // Sign extend src1 into edx:eax.
1702 cdq();
1703 idivl(src2);
1704 Integer32ToSmi(src2, src2);
1705 // Check that the remainder is zero.
1706 testl(rdx, rdx);
1707 if (src1.is(rax)) {
1708 Label smi_result;
1709 j(zero, &smi_result, Label::kNear);
1710 movq(src1, kScratchRegister);
1711 jmp(on_not_smi_result, near_jump);
1712 bind(&smi_result);
1713 } else {
1714 j(not_zero, on_not_smi_result, near_jump);
1715 }
1716 if (!dst.is(src1) && src1.is(rax)) {
1717 movq(src1, kScratchRegister);
1718 }
1719 Integer32ToSmi(dst, rax);
1720}
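
// The testl against 0x7fffffff above is a cheap filter: among untagged
// 32-bit values, only 0 and Smi::kMinValue (0x80000000) have all of their
// low 31 bits clear. Worked cases:
//
//   rax == 0,         src2 == -5  ->  result would be -0: go slow.
//   rax == kMinValue, src2 == -1  ->  quotient 2^31 overflows: go slow.
//   rax == kMinValue, src2 == -2  ->  representable, but still goes slow
//                                     (the overshoot noted above).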
1721
1722
1723void MacroAssembler::SmiMod(Register dst,
1724 Register src1,
1725 Register src2,
1726 Label* on_not_smi_result,
1727 Label::Distance near_jump) {
1728 ASSERT(!dst.is(kScratchRegister));
1729 ASSERT(!src1.is(kScratchRegister));
1730 ASSERT(!src2.is(kScratchRegister));
1731 ASSERT(!src2.is(rax));
1732 ASSERT(!src2.is(rdx));
1733 ASSERT(!src1.is(rdx));
1734 ASSERT(!src1.is(src2));
1735
1736 testq(src2, src2);
1737 j(zero, on_not_smi_result, near_jump);
1738
1739 if (src1.is(rax)) {
1740 movq(kScratchRegister, src1);
1741 }
1742 SmiToInteger32(rax, src1);
1743 SmiToInteger32(src2, src2);
1744
1745 // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
1746 Label safe_div;
1747 cmpl(rax, Immediate(Smi::kMinValue));
1748 j(not_equal, &safe_div, Label::kNear);
1749 cmpl(src2, Immediate(-1));
1750 j(not_equal, &safe_div, Label::kNear);
1751 // Retag inputs and go slow case.
1752 Integer32ToSmi(src2, src2);
1753 if (src1.is(rax)) {
1754 movq(src1, kScratchRegister);
1755 }
1756 jmp(on_not_smi_result, near_jump);
1757 bind(&safe_div);
1758
1759 // Sign extend eax into edx:eax.
1760 cdq();
1761 idivl(src2);
1762 // Restore smi tags on inputs.
1763 Integer32ToSmi(src2, src2);
1764 if (src1.is(rax)) {
1765 movq(src1, kScratchRegister);
1766 }
1767 // Check for a negative zero result. If the result is zero, and the
1768 // dividend is negative, go slow to return a floating point negative zero.
1769 Label smi_result;
1770 testl(rdx, rdx);
1771 j(not_zero, &smi_result, Label::kNear);
1772 testq(src1, src1);
1773 j(negative, on_not_smi_result, near_jump);
1774 bind(&smi_result);
1775 Integer32ToSmi(dst, rdx);
1776}
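
// A worked example of the trailing negative-zero check: for
// src1 == Smi(-5) and src2 == Smi(5), idivl leaves remainder 0 in rdx,
// but JavaScript requires -5 % 5 == -0, a heap number, so the zero
// remainder combined with the negative dividend routes to
// on_not_smi_result; 5 % 5 falls through and returns the smi 0 instead.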
1777
1778
Steve Blocka7e24c12009-10-30 11:49:00 +00001779void MacroAssembler::SmiNot(Register dst, Register src) {
Steve Block3ce2e202009-11-05 08:53:23 +00001780 ASSERT(!dst.is(kScratchRegister));
1781 ASSERT(!src.is(kScratchRegister));
1782 // Set tag and padding bits before negating, so that they are zero afterwards.
1783 movl(kScratchRegister, Immediate(~0));
Steve Blocka7e24c12009-10-30 11:49:00 +00001784 if (dst.is(src)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001785 xor_(dst, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00001786 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001787 lea(dst, Operand(src, kScratchRegister, times_1, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00001788 }
Steve Block3ce2e202009-11-05 08:53:23 +00001789 not_(dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001790}
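
// A worked bit pattern for the lea variant above, assuming 32-bit smi
// values in the upper word: movl zero-extends, so kScratchRegister holds
// 0x00000000FFFFFFFF. For src == Smi::FromInt(5) == 0x0000000500000000:
//
//   lea:   0x0000000500000000 + 0x00000000FFFFFFFF == 0x00000005FFFFFFFF
//   not_:  ~0x00000005FFFFFFFF == 0xFFFFFFFA00000000 == Smi::FromInt(-6)
//
// and Smi::FromInt(~5) == Smi::FromInt(-6), with the tag and padding bits
// zero again, which is why the ones are set before negating.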
1791
1792
1793void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
Steve Block3ce2e202009-11-05 08:53:23 +00001794 ASSERT(!dst.is(src2));
Steve Blocka7e24c12009-10-30 11:49:00 +00001795 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001796 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001797 }
1798 and_(dst, src2);
1799}
1800
1801
Steve Block3ce2e202009-11-05 08:53:23 +00001802void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
1803 if (constant->value() == 0) {
Steve Block9fac8402011-05-12 15:51:54 +01001804 Set(dst, 0);
Steve Block3ce2e202009-11-05 08:53:23 +00001805 } else if (dst.is(src)) {
1806 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001807 Register constant_reg = GetSmiConstant(constant);
1808 and_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001809 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001810 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001811 and_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001812 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001813}
1814
1815
1816void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1817 if (!dst.is(src1)) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01001818 ASSERT(!src1.is(src2));
Steve Block3ce2e202009-11-05 08:53:23 +00001819 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001820 }
1821 or_(dst, src2);
1822}
1823
1824
Steve Block3ce2e202009-11-05 08:53:23 +00001825void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1826 if (dst.is(src)) {
1827 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001828 Register constant_reg = GetSmiConstant(constant);
1829 or_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001830 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001831 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001832 or_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001833 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001834}
1835
Steve Block3ce2e202009-11-05 08:53:23 +00001836
Steve Blocka7e24c12009-10-30 11:49:00 +00001837void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1838 if (!dst.is(src1)) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01001839 ASSERT(!src1.is(src2));
Steve Block3ce2e202009-11-05 08:53:23 +00001840 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001841 }
1842 xor_(dst, src2);
1843}
1844
1845
Steve Block3ce2e202009-11-05 08:53:23 +00001846void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1847 if (dst.is(src)) {
1848 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001849 Register constant_reg = GetSmiConstant(constant);
1850 xor_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001851 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001852 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001853 xor_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001854 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001855}
1856
1857
Steve Blocka7e24c12009-10-30 11:49:00 +00001858void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
1859 Register src,
1860 int shift_value) {
Steve Block3ce2e202009-11-05 08:53:23 +00001861 ASSERT(is_uint5(shift_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001862 if (shift_value > 0) {
1863 if (dst.is(src)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001864 sar(dst, Immediate(shift_value + kSmiShift));
1865 shl(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001866 } else {
1867 UNIMPLEMENTED(); // Not used.
1868 }
1869 }
1870}
1871
1872
Steve Blocka7e24c12009-10-30 11:49:00 +00001873void MacroAssembler::SmiShiftLeftConstant(Register dst,
1874 Register src,
Kristian Monsen25f61362010-05-21 11:50:48 +01001875 int shift_value) {
Steve Block3ce2e202009-11-05 08:53:23 +00001876 if (!dst.is(src)) {
1877 movq(dst, src);
1878 }
1879 if (shift_value > 0) {
1880 shl(dst, Immediate(shift_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001881 }
1882}
1883
1884
Ben Murdoch257744e2011-11-30 15:57:28 +00001885void MacroAssembler::SmiShiftLogicalRightConstant(
1886 Register dst, Register src, int shift_value,
1887 Label* on_not_smi_result, Label::Distance near_jump) {
1888 // Logical right shift interprets its result as an *unsigned* number.
1889 if (dst.is(src)) {
1890 UNIMPLEMENTED(); // Not used.
1891 } else {
1892 movq(dst, src);
1893 if (shift_value == 0) {
1894 testq(dst, dst);
1895 j(negative, on_not_smi_result, near_jump);
1896 }
1897 shr(dst, Immediate(shift_value + kSmiShift));
1898 shl(dst, Immediate(kSmiShift));
1899 }
1900}
1901
1902
Steve Blocka7e24c12009-10-30 11:49:00 +00001903void MacroAssembler::SmiShiftLeft(Register dst,
1904 Register src1,
Kristian Monsen25f61362010-05-21 11:50:48 +01001905 Register src2) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001906 ASSERT(!dst.is(rcx));
Steve Block3ce2e202009-11-05 08:53:23 +00001907 // Untag shift amount.
1908 if (!dst.is(src1)) {
1909 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001910 }
Steve Block3ce2e202009-11-05 08:53:23 +00001911 SmiToInteger32(rcx, src2);
1912 // Shift amount is masked to the lower 5 bits, not six as for 64-bit shl.
1913 and_(rcx, Immediate(0x1f));
Steve Blockd0582a62009-12-15 09:54:21 +00001914 shl_cl(dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001915}
1916
1917
Ben Murdoch257744e2011-11-30 15:57:28 +00001918void MacroAssembler::SmiShiftLogicalRight(Register dst,
1919 Register src1,
1920 Register src2,
1921 Label* on_not_smi_result,
1922 Label::Distance near_jump) {
1923 ASSERT(!dst.is(kScratchRegister));
1924 ASSERT(!src1.is(kScratchRegister));
1925 ASSERT(!src2.is(kScratchRegister));
1926 ASSERT(!dst.is(rcx));
1927 // dst and src1 can be the same, because the one case that bails out
1928 // is a shift by 0, which leaves dst, and therefore src1, unchanged.
1929 if (src1.is(rcx) || src2.is(rcx)) {
1930 movq(kScratchRegister, rcx);
1931 }
1932 if (!dst.is(src1)) {
1933 movq(dst, src1);
1934 }
1935 SmiToInteger32(rcx, src2);
1936 orl(rcx, Immediate(kSmiShift));
1937 shr_cl(dst); // Shift is (rcx & 0x1f) + 32.
1938 shl(dst, Immediate(kSmiShift));
1939 testq(dst, dst);
1940 if (src1.is(rcx) || src2.is(rcx)) {
1941 Label positive_result;
1942 j(positive, &positive_result, Label::kNear);
1943 if (src1.is(rcx)) {
1944 movq(src1, kScratchRegister);
1945 } else {
1946 movq(src2, kScratchRegister);
1947 }
1948 jmp(on_not_smi_result, near_jump);
1949 bind(&positive_result);
1950 } else {
1951 // src2 was zero and src1 negative.
1952 j(negative, on_not_smi_result, near_jump);
1953 }
1954}
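
// A sketch of the shift-count trick above, assuming kSmiShift == 32 and
// that the 64-bit shr masks its count to six bits:
//
//   orl(rcx, Immediate(kSmiShift));  // effective count: 32 + (rcx & 0x1f)
//   shr_cl(dst);                     // untags dst and shifts in one go
//   shl(dst, Immediate(kSmiShift));  // retags the 32-bit result
//
// so the operand never needs a separate SmiToInteger32/Integer32ToSmi pair.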
1955
1956
Steve Blocka7e24c12009-10-30 11:49:00 +00001957void MacroAssembler::SmiShiftArithmeticRight(Register dst,
1958 Register src1,
1959 Register src2) {
Steve Block3ce2e202009-11-05 08:53:23 +00001960 ASSERT(!dst.is(kScratchRegister));
1961 ASSERT(!src1.is(kScratchRegister));
1962 ASSERT(!src2.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001963 ASSERT(!dst.is(rcx));
Steve Block3ce2e202009-11-05 08:53:23 +00001964 if (src1.is(rcx)) {
1965 movq(kScratchRegister, src1);
1966 } else if (src2.is(rcx)) {
1967 movq(kScratchRegister, src2);
1968 }
1969 if (!dst.is(src1)) {
1970 movq(dst, src1);
1971 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001972 SmiToInteger32(rcx, src2);
Steve Block3ce2e202009-11-05 08:53:23 +00001973 orl(rcx, Immediate(kSmiShift));
Steve Blockd0582a62009-12-15 09:54:21 +00001974 sar_cl(dst); // Shift is 32 + (original rcx & 0x1f).
Steve Block3ce2e202009-11-05 08:53:23 +00001975 shl(dst, Immediate(kSmiShift));
1976 if (src1.is(rcx)) {
1977 movq(src1, kScratchRegister);
1978 } else if (src2.is(rcx)) {
1979 movq(src2, kScratchRegister);
1980 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001981}
1982
1983
Ben Murdoch257744e2011-11-30 15:57:28 +00001984void MacroAssembler::SelectNonSmi(Register dst,
1985 Register src1,
1986 Register src2,
1987 Label* on_not_smis,
1988 Label::Distance near_jump) {
1989 ASSERT(!dst.is(kScratchRegister));
1990 ASSERT(!src1.is(kScratchRegister));
1991 ASSERT(!src2.is(kScratchRegister));
1992 ASSERT(!dst.is(src1));
1993 ASSERT(!dst.is(src2));
1994 // Both operands must not be smis.
1995#ifdef DEBUG
1996 if (allow_stub_calls()) { // Check contains a stub call.
1997 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
1998 Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
1999 }
2000#endif
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002001 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch257744e2011-11-30 15:57:28 +00002002 ASSERT_EQ(0, Smi::FromInt(0));
2003 movl(kScratchRegister, Immediate(kSmiTagMask));
2004 and_(kScratchRegister, src1);
2005 testl(kScratchRegister, src2);
2006 // If non-zero, then neither src1 nor src2 is a smi.
2007 j(not_zero, on_not_smis, near_jump);
2008
2009 // Exactly one operand is a smi.
2010 ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
2011 // kScratchRegister still holds src1 & kSmiTagMask: either zero or one.
2012 subq(kScratchRegister, Immediate(1));
2013 // If src1 is a smi, then scratch register all 1s, else it is all 0s.
2014 movq(dst, src1);
2015 xor_(dst, src2);
2016 and_(dst, kScratchRegister);
2017 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
2018 xor_(dst, src1);
2019 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
2020}
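
// A worked example of the mask arithmetic above, assuming kSmiTag == 0 so
// heap pointers have bit 0 set. With src1 a smi and src2 a heap object:
//
//   kScratchRegister = src1 & kSmiTagMask == 0
//   subq(..., 1)  ->  kScratchRegister == ~0 (all ones)
//   dst = (src1 ^ src2) & ~0 == src1 ^ src2
//   dst ^= src1   ->  dst == src2, the non-smi
//
// With the roles swapped, the subtract yields 0, the and clears dst, and
// the final xor leaves dst == src1 instead.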
2021
2022
Steve Block3ce2e202009-11-05 08:53:23 +00002023SmiIndex MacroAssembler::SmiToIndex(Register dst,
2024 Register src,
2025 int shift) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002026 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00002027 // There is a possible optimization if shift is in the range 60-63, but that
2028 // will (and must) never happen.
2029 if (!dst.is(src)) {
2030 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00002031 }
Steve Block3ce2e202009-11-05 08:53:23 +00002032 if (shift < kSmiShift) {
2033 sar(dst, Immediate(kSmiShift - shift));
2034 } else {
2035 shl(dst, Immediate(shift - kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00002036 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002037 return SmiIndex(dst, times_1);
2038}
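
// A worked example, assuming kSmiShift == 32: for shift == kPointerSizeLog2
// (3 on x64), a smi holding v is the 64-bit value v << 32, and
//
//   sar(dst, Immediate(kSmiShift - 3));  // v << 32  ->  v << 3  ==  v * 8
//
// produces the byte offset of element v in a pointer-sized array, ready for
// use with the returned times_1 scaling.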
2039
Steve Blocka7e24c12009-10-30 11:49:00 +00002040SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
2041 Register src,
2042 int shift) {
2043 // Register src holds a positive smi.
2044 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00002045 if (!dst.is(src)) {
2046 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00002047 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002048 neg(dst);
Steve Block3ce2e202009-11-05 08:53:23 +00002049 if (shift < kSmiShift) {
2050 sar(dst, Immediate(kSmiShift - shift));
2051 } else {
2052 shl(dst, Immediate(shift - kSmiShift));
2053 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002054 return SmiIndex(dst, times_1);
2055}
2056
2057
Steve Block44f0eee2011-05-26 01:26:41 +01002058void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
2059 ASSERT_EQ(0, kSmiShift % kBitsPerByte);
2060 addl(dst, Operand(src, kSmiShift / kBitsPerByte));
2061}
2062
2063
Ben Murdoch257744e2011-11-30 15:57:28 +00002064void MacroAssembler::JumpIfNotString(Register object,
2065 Register object_map,
2066 Label* not_string,
2067 Label::Distance near_jump) {
2068 Condition is_smi = CheckSmi(object);
2069 j(is_smi, not_string, near_jump);
2070 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
2071 j(above_equal, not_string, near_jump);
2072}
2073
2074
2075void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(
2076 Register first_object,
2077 Register second_object,
2078 Register scratch1,
2079 Register scratch2,
2080 Label* on_fail,
2081 Label::Distance near_jump) {
2082 // Check that both objects are not smis.
2083 Condition either_smi = CheckEitherSmi(first_object, second_object);
2084 j(either_smi, on_fail, near_jump);
2085
2086 // Load instance type for both strings.
2087 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
2088 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
2089 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
2090 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
2091
2092 // Check that both are flat ascii strings.
2093 ASSERT(kNotStringTag != 0);
2094 const int kFlatAsciiStringMask =
2095 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
2096 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
2097
2098 andl(scratch1, Immediate(kFlatAsciiStringMask));
2099 andl(scratch2, Immediate(kFlatAsciiStringMask));
2100 // Interleave the bits to check both scratch1 and scratch2 in one test.
2101 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
2102 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
2103 cmpl(scratch1,
2104 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
2105 j(not_equal, on_fail, near_jump);
2106}
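
// A sketch of the interleaving trick above: the ASSERT guarantees that
// kFlatAsciiStringMask and kFlatAsciiStringMask << 3 share no bits, so for
// the masked instance types a and b,
//
//   lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
//   // a + b * 8 == a | (b << 3), since no carries are possible,
//
// and one cmpl against kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)
// tests both strings at once.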
2107
2108
2109void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
2110 Register instance_type,
2111 Register scratch,
2112 Label* failure,
2113 Label::Distance near_jump) {
2114 if (!scratch.is(instance_type)) {
2115 movl(scratch, instance_type);
2116 }
2117
2118 const int kFlatAsciiStringMask =
2119 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
2120
2121 andl(scratch, Immediate(kFlatAsciiStringMask));
2122 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
2123 j(not_equal, failure, near_jump);
2124}
2125
2126
2127void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
2128 Register first_object_instance_type,
2129 Register second_object_instance_type,
2130 Register scratch1,
2131 Register scratch2,
2132 Label* on_fail,
2133 Label::Distance near_jump) {
2134 // Load instance type for both strings.
2135 movq(scratch1, first_object_instance_type);
2136 movq(scratch2, second_object_instance_type);
2137
2138 // Check that both are flat ascii strings.
2139 ASSERT(kNotStringTag != 0);
2140 const int kFlatAsciiStringMask =
2141 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
2142 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
2143
2144 andl(scratch1, Immediate(kFlatAsciiStringMask));
2145 andl(scratch2, Immediate(kFlatAsciiStringMask));
2146 // Interleave the bits to check both scratch1 and scratch2 in one test.
2147 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
2148 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
2149 cmpl(scratch1,
2150 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
2151 j(not_equal, on_fail, near_jump);
2152}
2153
2154
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002156void MacroAssembler::Move(Register dst, Register src) {
2157 if (!dst.is(src)) {
2158 movq(dst, src);
Steve Block6ded16b2010-05-10 14:33:55 +01002159 }
Steve Block6ded16b2010-05-10 14:33:55 +01002160}
2161
2162
Steve Blocka7e24c12009-10-30 11:49:00 +00002163void MacroAssembler::Move(Register dst, Handle<Object> source) {
2164 ASSERT(!source->IsFailure());
2165 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002166 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002167 } else {
2168 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
2169 }
2170}
2171
2172
2173void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00002174 ASSERT(!source->IsFailure());
Steve Blocka7e24c12009-10-30 11:49:00 +00002175 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002176 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002177 } else {
2178 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
2179 movq(dst, kScratchRegister);
2180 }
2181}
2182
2183
2184void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00002185 if (source->IsSmi()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002186 Cmp(dst, Smi::cast(*source));
Steve Block3ce2e202009-11-05 08:53:23 +00002187 } else {
2188 Move(kScratchRegister, source);
2189 cmpq(dst, kScratchRegister);
2190 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002191}
2192
2193
2194void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
2195 if (source->IsSmi()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002196 Cmp(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002197 } else {
2198 ASSERT(source->IsHeapObject());
2199 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
2200 cmpq(dst, kScratchRegister);
2201 }
2202}
2203
2204
2205void MacroAssembler::Push(Handle<Object> source) {
2206 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002207 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002208 } else {
2209 ASSERT(source->IsHeapObject());
2210 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
2211 push(kScratchRegister);
2212 }
2213}
2214
2215
2216void MacroAssembler::Push(Smi* source) {
Steve Block3ce2e202009-11-05 08:53:23 +00002217 intptr_t smi = reinterpret_cast<intptr_t>(source);
2218 if (is_int32(smi)) {
2219 push(Immediate(static_cast<int32_t>(smi)));
Steve Blocka7e24c12009-10-30 11:49:00 +00002220 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01002221 Register constant = GetSmiConstant(source);
2222 push(constant);
Steve Block3ce2e202009-11-05 08:53:23 +00002223 }
2224}
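
// A worked example of the is_int32 split above, assuming 32-bit smi values
// in the upper word: Smi::FromInt(0) is the 64-bit value 0 and fits an
// int32 immediate, but Smi::FromInt(1) is 0x0000000100000000 and
// Smi::FromInt(-1) is 0xFFFFFFFF00000000, so every non-zero smi takes the
// GetSmiConstant register path.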
2225
2226
Leon Clarkee46be812010-01-19 14:06:41 +00002227void MacroAssembler::Drop(int stack_elements) {
2228 if (stack_elements > 0) {
2229 addq(rsp, Immediate(stack_elements * kPointerSize));
2230 }
2231}
2232
2233
Steve Block3ce2e202009-11-05 08:53:23 +00002234void MacroAssembler::Test(const Operand& src, Smi* source) {
Leon Clarkef7060e22010-06-03 12:02:55 +01002235 testl(Operand(src, kIntSize), Immediate(source->value()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002236}
2237
2238
2239void MacroAssembler::Jump(ExternalReference ext) {
Steve Block44f0eee2011-05-26 01:26:41 +01002240 LoadAddress(kScratchRegister, ext);
Steve Blocka7e24c12009-10-30 11:49:00 +00002241 jmp(kScratchRegister);
2242}
2243
2244
2245void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
2246 movq(kScratchRegister, destination, rmode);
2247 jmp(kScratchRegister);
2248}
2249
2250
2251void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
Steve Block3ce2e202009-11-05 08:53:23 +00002252 // TODO(X64): Inline this
2253 jmp(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002254}
2255
2256
Steve Block44f0eee2011-05-26 01:26:41 +01002257int MacroAssembler::CallSize(ExternalReference ext) {
2258 // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
2259 const int kCallInstructionSize = 3;
2260 return LoadAddressSize(ext) + kCallInstructionSize;
2261}
2262
2263
Steve Blocka7e24c12009-10-30 11:49:00 +00002264void MacroAssembler::Call(ExternalReference ext) {
Steve Block44f0eee2011-05-26 01:26:41 +01002265#ifdef DEBUG
2266 int end_position = pc_offset() + CallSize(ext);
2267#endif
2268 LoadAddress(kScratchRegister, ext);
Steve Blocka7e24c12009-10-30 11:49:00 +00002269 call(kScratchRegister);
Steve Block44f0eee2011-05-26 01:26:41 +01002270#ifdef DEBUG
2271 CHECK_EQ(end_position, pc_offset());
2272#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002273}
2274
2275
2276void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
Steve Block44f0eee2011-05-26 01:26:41 +01002277#ifdef DEBUG
2278 int end_position = pc_offset() + CallSize(destination, rmode);
2279#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002280 movq(kScratchRegister, destination, rmode);
2281 call(kScratchRegister);
Steve Block44f0eee2011-05-26 01:26:41 +01002282#ifdef DEBUG
2283 CHECK_EQ(pc_offset(), end_position);
2284#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002285}
2286
2287
Ben Murdoch257744e2011-11-30 15:57:28 +00002288void MacroAssembler::Call(Handle<Code> code_object,
2289 RelocInfo::Mode rmode,
2290 unsigned ast_id) {
Steve Block44f0eee2011-05-26 01:26:41 +01002291#ifdef DEBUG
2292 int end_position = pc_offset() + CallSize(code_object);
2293#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002294 ASSERT(RelocInfo::IsCodeTarget(rmode));
Ben Murdoch257744e2011-11-30 15:57:28 +00002295 call(code_object, rmode, ast_id);
Steve Block44f0eee2011-05-26 01:26:41 +01002296#ifdef DEBUG
2297 CHECK_EQ(end_position, pc_offset());
2298#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002299}
2300
2301
Steve Block1e0659c2011-05-24 12:43:12 +01002302void MacroAssembler::Pushad() {
2303 push(rax);
2304 push(rcx);
2305 push(rdx);
2306 push(rbx);
2307 // Not pushing rsp or rbp.
2308 push(rsi);
2309 push(rdi);
2310 push(r8);
2311 push(r9);
2312 // r10 is kScratchRegister.
2313 push(r11);
Steve Block44f0eee2011-05-26 01:26:41 +01002314 // r12 is kSmiConstantRegister.
Steve Block1e0659c2011-05-24 12:43:12 +01002315 // r13 is kRootRegister.
2316 push(r14);
Steve Block44f0eee2011-05-26 01:26:41 +01002317 push(r15);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002318 STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
2319 // Use lea for symmetry with Popad.
2320 int sp_delta =
2321 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
2322 lea(rsp, Operand(rsp, -sp_delta));
Steve Block1e0659c2011-05-24 12:43:12 +01002323}
2324
2325
2326void MacroAssembler::Popad() {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002327 // Popad must not change the flags, so use lea instead of addq.
2328 int sp_delta =
2329 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
2330 lea(rsp, Operand(rsp, sp_delta));
Steve Block44f0eee2011-05-26 01:26:41 +01002331 pop(r15);
Steve Block1e0659c2011-05-24 12:43:12 +01002332 pop(r14);
Steve Block1e0659c2011-05-24 12:43:12 +01002333 pop(r11);
2334 pop(r9);
2335 pop(r8);
2336 pop(rdi);
2337 pop(rsi);
2338 pop(rbx);
2339 pop(rdx);
2340 pop(rcx);
2341 pop(rax);
2342}
2343
2344
2345void MacroAssembler::Dropad() {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002346 addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
Steve Block1e0659c2011-05-24 12:43:12 +01002347}
2348
2349
2350// Order in which general registers are pushed by Pushad:
Steve Block44f0eee2011-05-26 01:26:41 +01002351// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
Steve Block1e0659c2011-05-24 12:43:12 +01002352int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
2353 0,
2354 1,
2355 2,
2356 3,
2357 -1,
2358 -1,
2359 4,
2360 5,
2361 6,
2362 7,
2363 -1,
2364 8,
Steve Block1e0659c2011-05-24 12:43:12 +01002365 -1,
Steve Block44f0eee2011-05-26 01:26:41 +01002366 -1,
2367 9,
2368 10
Steve Block1e0659c2011-05-24 12:43:12 +01002369};
2370
2371
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002372void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
2373 movq(SafepointRegisterSlot(dst), src);
2374}
2375
2376
2377void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
2378 movq(dst, SafepointRegisterSlot(src));
2379}
2380
2381
2382Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
2383 return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
2384}
2385
2386
Steve Blocka7e24c12009-10-30 11:49:00 +00002387void MacroAssembler::PushTryHandler(CodeLocation try_location,
2388 HandlerType type) {
2389 // Adjust this code if not the case.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002390 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
2391 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
2392 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
2393 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
2394 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
2395 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +00002396
2397 // The pc (return address) is already on TOS. This code pushes state,
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002398 // frame pointer, context, and current handler.
Steve Blocka7e24c12009-10-30 11:49:00 +00002399 if (try_location == IN_JAVASCRIPT) {
2400 if (type == TRY_CATCH_HANDLER) {
2401 push(Immediate(StackHandler::TRY_CATCH));
2402 } else {
2403 push(Immediate(StackHandler::TRY_FINALLY));
2404 }
2405 push(rbp);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002406 push(rsi);
Steve Blocka7e24c12009-10-30 11:49:00 +00002407 } else {
2408 ASSERT(try_location == IN_JS_ENTRY);
2409 // The frame pointer does not point to a JS frame so we save NULL
2410 // for rbp. We expect the code throwing an exception to check rbp
2411 // before dereferencing it to restore the context.
2412 push(Immediate(StackHandler::ENTRY));
2413 push(Immediate(0)); // NULL frame pointer.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002414 Push(Smi::FromInt(0)); // No context.
Steve Blocka7e24c12009-10-30 11:49:00 +00002415 }
2416 // Save the current handler.
Steve Block44f0eee2011-05-26 01:26:41 +01002417 Operand handler_operand =
2418 ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
2419 push(handler_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002420 // Link this handler.
Steve Block44f0eee2011-05-26 01:26:41 +01002421 movq(handler_operand, rsp);
Steve Blocka7e24c12009-10-30 11:49:00 +00002422}
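
// After the pushes above, the new handler matches the layout pinned down by
// the STATIC_ASSERTs, from the stack pointer upward:
//
//   rsp + 0 * kPointerSize : next handler   (kNextOffset)
//   rsp + 1 * kPointerSize : context        (kContextOffset)
//   rsp + 2 * kPointerSize : frame pointer  (kFPOffset)
//   rsp + 3 * kPointerSize : state          (kStateOffset)
//   rsp + 4 * kPointerSize : return address (kPCOffset)
//
// Throw below unwinds by popping the fields in exactly this order.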
2423
2424
Leon Clarkee46be812010-01-19 14:06:41 +00002425void MacroAssembler::PopTryHandler() {
2426 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
2427 // Unlink this handler.
Steve Block44f0eee2011-05-26 01:26:41 +01002428 Operand handler_operand =
2429 ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
2430 pop(handler_operand);
Leon Clarkee46be812010-01-19 14:06:41 +00002431 // Remove the remaining fields.
2432 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
2433}
2434
2435
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002436void MacroAssembler::Throw(Register value) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002437 // Adjust this code if not the case.
2438 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
2439 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
2440 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
2441 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
2442 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
2443 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002444 // Keep thrown value in rax.
2445 if (!value.is(rax)) {
2446 movq(rax, value);
2447 }
2448
Steve Block44f0eee2011-05-26 01:26:41 +01002449 ExternalReference handler_address(Isolate::k_handler_address, isolate());
2450 Operand handler_operand = ExternalOperand(handler_address);
2451 movq(rsp, handler_operand);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002452 // Get the next handler in the chain.
Steve Block44f0eee2011-05-26 01:26:41 +01002453 pop(handler_operand);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002454 pop(rsi); // Context.
2455 pop(rbp); // Frame pointer.
2456 pop(rdx); // State.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002457
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002458 // If the handler is a JS frame, restore the context to the frame.
2459 // (rdx == ENTRY) == (rbp == 0) == (rsi == 0), so we could test any
2460 // of them.
Ben Murdoch257744e2011-11-30 15:57:28 +00002461 Label skip;
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002462 cmpq(rdx, Immediate(StackHandler::ENTRY));
Ben Murdoch257744e2011-11-30 15:57:28 +00002463 j(equal, &skip, Label::kNear);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002464 movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002465 bind(&skip);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002466
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002467 ret(0);
2468}
2469
2470
2471void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
2472 Register value) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002473 // Adjust this code if not the case.
2474 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
2475 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
2476 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
2477 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
2478 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
2479 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002480 // Keep thrown value in rax.
2481 if (!value.is(rax)) {
2482 movq(rax, value);
2483 }
2484 // Fetch top stack handler.
Steve Block44f0eee2011-05-26 01:26:41 +01002485 ExternalReference handler_address(Isolate::k_handler_address, isolate());
2486 Load(rsp, handler_address);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002487
2488 // Unwind the handlers until the ENTRY handler is found.
Ben Murdoch257744e2011-11-30 15:57:28 +00002489 Label loop, done;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002490 bind(&loop);
2491 // Load the type of the current stack handler.
2492 const int kStateOffset = StackHandlerConstants::kStateOffset;
2493 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
Ben Murdoch257744e2011-11-30 15:57:28 +00002494 j(equal, &done, Label::kNear);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002495 // Fetch the next handler in the list.
2496 const int kNextOffset = StackHandlerConstants::kNextOffset;
2497 movq(rsp, Operand(rsp, kNextOffset));
2498 jmp(&loop);
2499 bind(&done);
2500
2501 // Set the top handler address to next handler past the current ENTRY handler.
Steve Block44f0eee2011-05-26 01:26:41 +01002502 Operand handler_operand = ExternalOperand(handler_address);
2503 pop(handler_operand);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002504
2505 if (type == OUT_OF_MEMORY) {
2506 // Set external caught exception to false.
Steve Block44f0eee2011-05-26 01:26:41 +01002507 ExternalReference external_caught(
2508 Isolate::k_external_caught_exception_address, isolate());
Ben Murdoch8b112d22011-06-08 16:22:53 +01002509 Set(rax, static_cast<int64_t>(false));
Steve Block44f0eee2011-05-26 01:26:41 +01002510 Store(external_caught, rax);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002511
2512 // Set pending exception and rax to out of memory exception.
Steve Block44f0eee2011-05-26 01:26:41 +01002513 ExternalReference pending_exception(Isolate::k_pending_exception_address,
2514 isolate());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002515 movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
Steve Block44f0eee2011-05-26 01:26:41 +01002516 Store(pending_exception, rax);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002517 }
2518
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002519 // Discard the context saved in the handler and clear the context pointer.
2520 pop(rdx);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002521 Set(rsi, 0);
2522
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002523 pop(rbp); // Restore frame pointer.
2524 pop(rdx); // Discard state.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002525
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002526 ret(0);
2527}
2528
2529
Steve Blocka7e24c12009-10-30 11:49:00 +00002530void MacroAssembler::Ret() {
2531 ret(0);
2532}
2533
2534
Steve Block1e0659c2011-05-24 12:43:12 +01002535void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2536 if (is_uint16(bytes_dropped)) {
2537 ret(bytes_dropped);
2538 } else {
2539 pop(scratch);
2540 addq(rsp, Immediate(bytes_dropped));
2541 push(scratch);
2542 ret(0);
2543 }
2544}
2545
2546
Steve Blocka7e24c12009-10-30 11:49:00 +00002547void MacroAssembler::FCmp() {
Steve Block3ce2e202009-11-05 08:53:23 +00002548 fucomip();
Steve Block8defd9f2010-07-08 12:39:36 +01002549 fstp(0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002550}
2551
2552
2553void MacroAssembler::CmpObjectType(Register heap_object,
2554 InstanceType type,
2555 Register map) {
2556 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
2557 CmpInstanceType(map, type);
2558}
2559
2560
2561void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
2562 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
2563 Immediate(static_cast<int8_t>(type)));
2564}
2565
2566
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002567void MacroAssembler::CheckFastElements(Register map,
2568 Label* fail,
2569 Label::Distance distance) {
2570 STATIC_ASSERT(JSObject::FAST_ELEMENTS == 0);
2571 cmpb(FieldOperand(map, Map::kBitField2Offset),
2572 Immediate(Map::kMaximumBitField2FastElementValue));
2573 j(above, fail, distance);
2574}
2575
2576
Andrei Popescu31002712010-02-23 13:46:05 +00002577void MacroAssembler::CheckMap(Register obj,
2578 Handle<Map> map,
2579 Label* fail,
Ben Murdoch257744e2011-11-30 15:57:28 +00002580 SmiCheckType smi_check_type) {
2581 if (smi_check_type == DO_SMI_CHECK) {
Andrei Popescu31002712010-02-23 13:46:05 +00002582 JumpIfSmi(obj, fail);
2583 }
2584 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
2585 j(not_equal, fail);
2586}
2587
2588
Ben Murdoch257744e2011-11-30 15:57:28 +00002589void MacroAssembler::ClampUint8(Register reg) {
2590 Label done;
2591 testl(reg, Immediate(0xFFFFFF00));
2592 j(zero, &done, Label::kNear);
2593 setcc(negative, reg); // 1 if negative, 0 if positive.
2594 decb(reg); // 0 if negative, 255 if positive.
2595 bind(&done);
2596}
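
// A worked example of the branch-free clamp above; testl only leaves the
// fast path when reg is outside [0, 255]:
//
//   reg == 300:  sign clear after testl, setcc writes 0, decb -> 0xff (255)
//   reg == -5:   sign set after testl,   setcc writes 1, decb -> 0x00 (0)
//
// Either way the low byte lands at the nearer end of the uint8 range.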
2597
2598
2599void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
2600 XMMRegister temp_xmm_reg,
2601 Register result_reg,
2602 Register temp_reg) {
2603 Label done;
2604 Set(result_reg, 0);
2605 xorps(temp_xmm_reg, temp_xmm_reg);
2606 ucomisd(input_reg, temp_xmm_reg);
2607 j(below, &done, Label::kNear);
2608 uint64_t one_half = BitCast<uint64_t, double>(0.5);
2609 Set(temp_reg, one_half);
2610 movq(temp_xmm_reg, temp_reg);
2611 addsd(temp_xmm_reg, input_reg);
2612 cvttsd2si(result_reg, temp_xmm_reg);
2613 testl(result_reg, Immediate(0xFFFFFF00));
2614 j(zero, &done, Label::kNear);
2615 Set(result_reg, 255);
2616 bind(&done);
2617}
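
// Worked cases for the double clamp above: the below branch is taken for
// negative inputs and for NaN (ucomisd sets the carry flag when unordered),
// so both leave result_reg at its initial 0. Otherwise the 0.5 bias makes
// the truncating conversion round:
//
//   input == 100.7  ->  101.2  ->  cvttsd2si gives 101 (in range: done)
//   input == 300.0  ->  300.5  ->  300 fails the 0xFFFFFF00 test -> 255
//
// Inputs too large for int32 come out of cvttsd2si as 0x80000000, which
// also fails the test and clamps to 255.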
2618
2619
2620void MacroAssembler::LoadInstanceDescriptors(Register map,
2621 Register descriptors) {
2622 movq(descriptors, FieldOperand(map,
2623 Map::kInstanceDescriptorsOrBitField3Offset));
2624 Label not_smi;
2625 JumpIfNotSmi(descriptors, &not_smi, Label::kNear);
2626 Move(descriptors, isolate()->factory()->empty_descriptor_array());
2627 bind(&not_smi);
2628}
2629
2630
2631void MacroAssembler::DispatchMap(Register obj,
2632 Handle<Map> map,
2633 Handle<Code> success,
2634 SmiCheckType smi_check_type) {
2635 Label fail;
2636 if (smi_check_type == DO_SMI_CHECK) {
2637 JumpIfSmi(obj, &fail);
2638 }
2639 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
2640 j(equal, success, RelocInfo::CODE_TARGET);
2641
2642 bind(&fail);
2643}
2644
2645
Leon Clarkef7060e22010-06-03 12:02:55 +01002646void MacroAssembler::AbortIfNotNumber(Register object) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002647 Label ok;
Andrei Popescu402d9372010-02-26 13:31:12 +00002648 Condition is_smi = CheckSmi(object);
Ben Murdoch257744e2011-11-30 15:57:28 +00002649 j(is_smi, &ok, Label::kNear);
Andrei Popescu402d9372010-02-26 13:31:12 +00002650 Cmp(FieldOperand(object, HeapObject::kMapOffset),
Ben Murdoch257744e2011-11-30 15:57:28 +00002651 isolate()->factory()->heap_number_map());
Leon Clarkef7060e22010-06-03 12:02:55 +01002652 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +00002653 bind(&ok);
2654}
2655
2656
Iain Merrick75681382010-08-19 15:07:18 +01002657void MacroAssembler::AbortIfSmi(Register object) {
Iain Merrick75681382010-08-19 15:07:18 +01002658 Condition is_smi = CheckSmi(object);
2659 Assert(NegateCondition(is_smi), "Operand is a smi");
2660}
2661
2662
Leon Clarkef7060e22010-06-03 12:02:55 +01002663void MacroAssembler::AbortIfNotSmi(Register object) {
Steve Block44f0eee2011-05-26 01:26:41 +01002664 Condition is_smi = CheckSmi(object);
2665 Assert(is_smi, "Operand is not a smi");
2666}
2667
2668
2669void MacroAssembler::AbortIfNotSmi(const Operand& object) {
Steve Block6ded16b2010-05-10 14:33:55 +01002670 Condition is_smi = CheckSmi(object);
Iain Merrick75681382010-08-19 15:07:18 +01002671 Assert(is_smi, "Operand is not a smi");
Steve Block6ded16b2010-05-10 14:33:55 +01002672}
2673
2674
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002675void MacroAssembler::AbortIfNotString(Register object) {
2676 testb(object, Immediate(kSmiTagMask));
2677 Assert(not_equal, "Operand is not a string");
2678 push(object);
2679 movq(object, FieldOperand(object, HeapObject::kMapOffset));
2680 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
2681 pop(object);
2682 Assert(below, "Operand is not a string");
2683}
2684
2685
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002686void MacroAssembler::AbortIfNotRootValue(Register src,
2687 Heap::RootListIndex root_value_index,
2688 const char* message) {
2689 ASSERT(!src.is(kScratchRegister));
2690 LoadRoot(kScratchRegister, root_value_index);
2691 cmpq(src, kScratchRegister);
2692 Check(equal, message);
2693}
2694
2695
Leon Clarked91b9f72010-01-27 17:25:45 +00002697Condition MacroAssembler::IsObjectStringType(Register heap_object,
2698 Register map,
2699 Register instance_type) {
2700 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00002701 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002702 STATIC_ASSERT(kNotStringTag != 0);
Leon Clarked91b9f72010-01-27 17:25:45 +00002703 testb(instance_type, Immediate(kIsNotStringMask));
2704 return zero;
2705}
2706
2707
Steve Blocka7e24c12009-10-30 11:49:00 +00002708void MacroAssembler::TryGetFunctionPrototype(Register function,
2709 Register result,
2710 Label* miss) {
2711 // Check that the receiver isn't a smi.
2712 testl(function, Immediate(kSmiTagMask));
2713 j(zero, miss);
2714
2715 // Check that the function really is a function.
2716 CmpObjectType(function, JS_FUNCTION_TYPE, result);
2717 j(not_equal, miss);
2718
2719 // Make sure that the function has an instance prototype.
Ben Murdoch257744e2011-11-30 15:57:28 +00002720 Label non_instance;
Steve Blocka7e24c12009-10-30 11:49:00 +00002721 testb(FieldOperand(result, Map::kBitFieldOffset),
2722 Immediate(1 << Map::kHasNonInstancePrototype));
Ben Murdoch257744e2011-11-30 15:57:28 +00002723 j(not_zero, &non_instance, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002724
2725 // Get the prototype or initial map from the function.
2726 movq(result,
2727 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2728
2729 // If the prototype or initial map is the hole, don't return it and
2730 // simply miss the cache instead. This will allow us to allocate a
2731 // prototype object on-demand in the runtime system.
2732 CompareRoot(result, Heap::kTheHoleValueRootIndex);
2733 j(equal, miss);
2734
2735 // If the function does not have an initial map, we're done.
Ben Murdoch257744e2011-11-30 15:57:28 +00002736 Label done;
Steve Blocka7e24c12009-10-30 11:49:00 +00002737 CmpObjectType(result, MAP_TYPE, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002738 j(not_equal, &done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002739
2740 // Get the prototype from the initial map.
2741 movq(result, FieldOperand(result, Map::kPrototypeOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002742 jmp(&done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00002743
2744 // Non-instance prototype: Fetch prototype from constructor field
2745 // in initial map.
2746 bind(&non_instance);
2747 movq(result, FieldOperand(result, Map::kConstructorOffset));
2748
2749 // All done.
2750 bind(&done);
2751}
2752
2753
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    movl(counter_operand, Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      incl(counter_operand);
    } else {
      addl(counter_operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      decl(counter_operand);
    } else {
      subl(counter_operand, Immediate(value));
    }
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  Set(rax, 0);  // No arguments.
  LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif  // ENABLE_DEBUGGER_SUPPORT


void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // This macro takes the dst register to make the code more readable
  // at the call sites. However, the dst register has to be rcx to
  // follow the calling convention, which requires the call kind to be
  // in rcx.
  ASSERT(dst.is(rcx));
  if (call_kind == CALL_AS_FUNCTION) {
    LoadSmiConstant(dst, Smi::FromInt(1));
  } else {
    LoadSmiConstant(dst, Smi::FromInt(0));
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  InvokePrologue(expected,
                 actual,
                 Handle<Code>::null(),
                 code,
                 &done,
                 flag,
                 Label::kNear,
                 call_wrapper,
                 call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code));
    SetCallKind(rcx, call_kind);
    call(code);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(rcx, call_kind);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  Register dummy = rax;
  InvokePrologue(expected,
                 actual,
                 code,
                 dummy,
                 &done,
                 flag,
                 Label::kNear,
                 call_wrapper,
                 call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code));
    SetCallKind(rcx, call_kind);
    Call(code, rmode);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(rcx, call_kind);
    Jump(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  ASSERT(function->is_compiled());
  // Get the function and set up the context.
  Move(rdi, Handle<JSFunction>(function));
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  if (V8::UseCrankshaft()) {
    // Since Crankshaft can recompile a function, we need to load
    // the Code object every time we call the function.
    movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
    ParameterCount expected(function->shared()->formal_parameter_count());
    InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
  } else {
    // Invoke the cached code.
    Handle<Code> code(function->code());
    ParameterCount expected(function->shared()->formal_parameter_count());
    InvokeCode(code,
               expected,
               actual,
               RelocInfo::CODE_TARGET,
               flag,
               call_wrapper,
               call_kind);
  }
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    Label* done,
                                    InvokeFlag flag,
                                    Label::Distance near_jump,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip the adaptation code by making it
        // look like we have a match between expected and actual number
        // of arguments.
        definitely_matches = true;
      } else {
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      ASSERT(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

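  // If a mismatch is still possible at this point, call or jump through the
  // arguments adaptor trampoline, which pads or drops the actual arguments
  // to match the expected count before entering the target code.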
  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      SetCallKind(rcx, call_kind);
      Call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      jmp(done, near_jump);
    } else {
      SetCallKind(rcx, call_kind);
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
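  // The frame now looks like this (a sketch; offsets relative to rbp):
  //   rbp +  8 : return address
  //   rbp +  0 : saved rbp
  //   rbp -  8 : context (rsi)
  //   rbp - 16 : frame type marker (smi), checked again in LeaveFrame
  //   rbp - 24 : code object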
  if (emit_debug_code()) {
    movq(kScratchRegister,
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for the entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movq(r14, rax);  // Back up rax in a callee-saved register.
  }

  Store(ExternalReference(Isolate::k_c_entry_fp_address, isolate()), rbp);
  Store(ExternalReference(Isolate::k_context_address, isolate()), rsi);
}


void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kNumRegisters * kDoubleSize +
                arg_stack_space * kPointerSize;
    subq(rsp, Immediate(space));
    int offset = -2 * kPointerSize;
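    // The doubles land below the two exit-frame slots pushed by the
    // prologue (saved entry sp and code object): register i is kept at
    // rbp - 2 * kPointerSize - (i + 1) * kDoubleSize.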
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subq(rsp, Immediate(arg_stack_space * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    ASSERT(is_int8(kFrameAlignment));
    and_(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}


void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }
  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Drop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r15, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(rcx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveApiExitFrame() {
  movq(rsp, rbp);
  pop(rbp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::k_context_address, isolate());
  Operand context_operand = ExternalOperand(context_address);
  movq(rsi, context_operand);
#ifdef DEBUG
  movq(context_operand, Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
                                       isolate());
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movq(c_entry_fp_operand, Immediate(0));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load the current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (emit_debug_code()) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check that the context is a global context.
  if (emit_debug_code()) {
    // Preserve the original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare it to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary.
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeded.
  //          Allowed to be the same as 'key' or 'result'.
  //          Unchanged on bailout so 'key' or 'result' can be used
  //          in further computation.

  Label done;

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  movl(r1, r0);
  notl(r0);
  shll(r1, Immediate(15));
  addl(r0, r1);
  // hash = hash ^ (hash >> 12);
  movl(r1, r0);
  shrl(r1, Immediate(12));
  xorl(r0, r1);
  // hash = hash + (hash << 2);
  leal(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  movl(r1, r0);
  shrl(r1, Immediate(4));
  xorl(r0, r1);
  // hash = hash * 2057;
  imull(r0, r0, Immediate(2057));
  // hash = hash ^ (hash >> 16);
  movl(r1, r0);
  shrl(r1, Immediate(16));
  xorl(r0, r1);
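  //
  // For reference, the whole sequence implements this function (a sketch in
  // C, assuming 32-bit unsigned arithmetic; ComputeIntegerHash in utils.h
  // is the authoritative version):
  //
  //   uint32_t ComputeIntegerHash(uint32_t hash) {
  //     hash = ~hash + (hash << 15);
  //     hash = hash ^ (hash >> 12);
  //     hash = hash + (hash << 2);
  //     hash = hash ^ (hash >> 4);
  //     hash = hash * 2057;
  //     hash = hash ^ (hash >> 16);
  //     return hash;
  //   }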

  // Compute capacity mask.
  SmiToInteger32(r1,
                 FieldOperand(elements, NumberDictionary::kCapacityOffset));
  decl(r1);
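  // r1 now holds capacity - 1. Dictionary capacities are powers of two, so
  // this value doubles as the bit mask that keeps probe indices in range.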

  // Generate an unrolled loop that performs a few probes before giving up.
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    movq(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      addl(r2, Immediate(NumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(NumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmpq(key, FieldOperand(elements,
                           r2,
                           times_pointer_size,
                           NumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ASSERT_EQ(NORMAL, 0);
  Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Smi::FromInt(PropertyDetails::TypeField::mask()));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      NumberDictionary::kElementsStartOffset + kPointerSize;
  movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    Operand top_operand = ExternalOperand(new_space_allocation_top);
    cmpq(result, top_operand);
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    LoadAddress(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else {
    Load(result, new_space_allocation_top);
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (emit_debug_code()) {
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Update new top.
  if (scratch.is_valid()) {
    // Scratch already contains address of allocation top.
    movq(Operand(scratch, 0), result_end);
  } else {
    Store(new_space_allocation_top, result_end);
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    movq(top_reg, result);
  }
  addq(top_reg, Immediate(object_size));
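  // If the addition wraps around the address space, the carry flag is set;
  // a wrapped top can never be a valid allocation, so bail out to the GC.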
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(top_reg, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      subq(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subq(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    // Tag the result if requested.
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count*element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  addq(result_end, result);
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(result_end, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  }
  addq(result_end, result);
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(result_end, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


3516
3517void MacroAssembler::UndoAllocationInNewSpace(Register object) {
3518 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01003519 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00003520
3521 // Make sure the object has no tag before resetting top.
3522 and_(object, Immediate(~kHeapObjectTagMask));
Steve Block44f0eee2011-05-26 01:26:41 +01003523 Operand top_operand = ExternalOperand(new_space_allocation_top);
Steve Blocka7e24c12009-10-30 11:49:00 +00003524#ifdef DEBUG
Steve Block44f0eee2011-05-26 01:26:41 +01003525 cmpq(object, top_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003526 Check(below, "Undo allocation of non allocated memory");
3527#endif
Steve Block44f0eee2011-05-26 01:26:41 +01003528 movq(top_operand, object);
Steve Blocka7e24c12009-10-30 11:49:00 +00003529}
3530
3531
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }
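  // In effect, scratch1 now holds the smallest byte count that, added to the
  // header size, keeps the total object size kObjectAlignment-aligned; it is
  // fed to AllocateInNewSpace below as an element count with times_1 scaling.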

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Destination is incremented by length; source, length and scratch are
// clobbered.
// A simpler loop is faster on small copies, but slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag,
// before calling this function.
void MacroAssembler::CopyBytes(Register destination,
                               Register source,
                               Register length,
                               int min_length,
                               Register scratch) {
  ASSERT(min_length >= 0);
  if (FLAG_debug_code) {
    cmpl(length, Immediate(min_length));
    Assert(greater_equal, "Invalid min_length");
  }
  Label loop, done, short_string, short_loop;

  const int kLongStringLimit = 20;
  if (min_length <= kLongStringLimit) {
    cmpl(length, Immediate(kLongStringLimit));
    j(less_equal, &short_string);
  }

  ASSERT(source.is(rsi));
  ASSERT(destination.is(rdi));
  ASSERT(length.is(rcx));

  // Because source is 8-byte aligned in our uses of this function,
  // we keep source aligned for the rep movs operation by copying the odd
  // bytes at the end of the ranges.
  movq(scratch, length);
  shrl(length, Immediate(3));
  repmovsq();
  // Move remaining bytes of length.
  andl(scratch, Immediate(0x7));
  movq(length, Operand(source, scratch, times_1, -8));
  movq(Operand(destination, scratch, times_1, -8), length);
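  // The two moves above copy the trailing (length & 7) bytes by rewriting
  // the final eight bytes of the range as one overlapping quadword; bytes
  // already moved by rep movsq are simply stored again with the same values.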
  addq(destination, scratch);

  if (min_length <= kLongStringLimit) {
    jmp(&done);

    bind(&short_string);
    if (min_length == 0) {
      testl(length, length);
      j(zero, &done);
    }
    lea(scratch, Operand(destination, length, times_1, 0));

    bind(&short_loop);
    movb(length, Operand(source, 0));
    movb(Operand(destination, 0), length);
    incq(source);
    incq(destination);
    cmpq(destination, scratch);
    j(not_equal, &short_loop);

    bind(&done);
  }
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in rsi).
    movq(dst, rsi);
  }

  // We should not have found a with context by walking the context
  // chain (i.e., the static scope chain and runtime context chain do
  // not agree). A variable occurring in such a scope should have
  // slot type LOOKUP and not CONTEXT.
  if (emit_debug_code()) {
    CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
                Heap::kWithContextMapRootIndex);
    Check(not_equal, "Variable resolved to with context.");
  }
}

#ifdef _WIN64
static const int kRegisterPassedArguments = 4;
#else
static const int kRegisterPassedArguments = 6;
#endif

void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  movq(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


3781
Leon Clarke4515c472010-02-03 11:58:03 +00003782int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003783 // On Windows 64 stack slots are reserved by the caller for all arguments
3784 // including the ones passed in registers, and space is always allocated for
3785 // the four register arguments even if the function takes fewer than four
3786 // arguments.
3787 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
3788 // and the caller does not reserve stack slots for them.
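  // For example: with four arguments this returns 4 on Windows (the minimum
  // shadow space) and 0 elsewhere; with eight arguments it returns 8 on
  // Windows and 2 elsewhere.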
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  const int kMinimumStackSlots = kRegisterPassedArguments;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}


void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
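  // rsp now points at the lowest argument slot and is frame-aligned; the
  // caller's original rsp sits in the slot just above the argument area,
  // where CallCFunction reloads it to tear the frame back down.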
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64