// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate->heap()->roots_address());
  intptr_t delta = other.address() - roots_register_value;
  return delta;
}

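// A note on the helpers below: materializing an arbitrary external
// reference on x64 normally costs a 10-byte movq of a 64-bit immediate
// into a scratch register.  When the root array is available (and we are
// not serializing), any reference within a 32-bit displacement of
// kRootRegister can instead be encoded directly as a memory operand
// relative to that register, which is both shorter and scratch-free.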
Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(target, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  movq(scratch, target);
  return Operand(scratch, 0);
}


void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  } else {
    movq(kScratchRegister, source);
    movq(destination, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(destination, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  } else {
    movq(kScratchRegister, destination);
    movq(Operand(kScratchRegister, 0), source);
  }
}


void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  movq(destination, source);
}


int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      // Operand is lea(scratch, Operand(kRootRegister, delta));
      // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7 bytes.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movq(destination, src);
  return 10;
}

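// The root list accessors below address the roots array relative to
// kRootRegister, subtracting kRootRegisterBias from each displacement
// because the register points kRootRegisterBias bytes past the start of
// the array (presumably so more of these accesses fit in a signed 8-bit
// displacement; the bias itself is defined elsewhere).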
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  ASSERT(root_array_available_);
  movq(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  cmpq(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  ASSERT(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}

void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (emit_debug_code()) {
    // Check that the object is not in new space.
    NearLabel not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr.  See the
  // Page::GetRegionNumberForAddress method for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));

  // Set dirty mark for region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (emit_debug_code()) {
    NearLabel okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    NearLabel ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  NearLabel L;
  j(cc, &L);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}

void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    NearLabel alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if the stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  NearLabel ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}

void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}

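// The Try* variants of the stub and runtime calls below mirror their
// plain counterparts, but surface allocation failures (e.g. from
// CodeStub::TryGetCode) as a MaybeObject* result instead of aborting.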
MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
         RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
        RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}

void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later. However, as the new key is the numeric value of a string
  // key, there is no difference in using either key.
  Integer32ToSmi(index, hash);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}

void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(rax, function->nargs);
  LoadAddress(rbx, ExternalReference(function, isolate()));
  CEntryStub ces(1);
  ces.SaveDoubles();
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return HEAP->undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}

void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  return TryJumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(ExternalReference(fid, isolate()),
                                      num_arguments,
                                      result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that it fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}

void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#ifdef _WIN64
  // We need to prepare a slot for the result handle on the stack and put
  // a pointer to it into the first argument register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}

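// Calls an API function, doing the handle scope bookkeeping by hand: the
// isolate's handle scope data (next, limit, level) is cached in
// callee-saved registers around the call, the level is incremented and
// restored, and any handle scope extensions the callee allocated are
// deleted before leaving the exit frame.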
MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
    ApiFunction* function, int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax,
       reinterpret_cast<int64_t>(function->address()),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#ifdef _WIN64
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero.  Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), FACTORY->the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
                                           0, 1);
  if (result->IsFailure()) {
    return result;
  }

  bind(&empty_result);
  // It was zero; the result is undefined.
  Move(rax, FACTORY->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
#ifdef _WIN64
  LoadAddress(rcx, ExternalReference::isolate_address());
#else
  LoadAddress(rdi, ExternalReference::isolate_address());
#endif
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);

  return result;
}

void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext, int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  return TryTailCallStub(&ces);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   CallWrapper* call_wrapper) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}

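// Set picks the shortest encoding that produces the required value: xorl
// for zero, movl for values that fit in unsigned 32 bits (32-bit writes
// zero-extend into the upper half of the register on x64), a
// sign-extended 32-bit immediate for values that fit in signed 32 bits,
// and only otherwise a full 10-byte 64-bit immediate movq.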
void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    Set(kScratchRegister, x);
    movq(dst, kScratchRegister);
  }
}


// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

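// On x64 a smi keeps its 32-bit payload in the upper half of the word:
// the low 32 bits, including the tag bit (kSmiTag == 0), are all zero and
// kSmiShift is 32.  That is why every int32 is a valid smi value below,
// and why several helpers read or write the untagged value with a single
// 32-bit access at offset kSmiShift / kBitsPerByte.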
Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

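// LoadSmiConstant synthesizes small smi constants from
// kSmiConstantRegister, which holds Smi::FromInt(kSmiConstantRegisterValue)
// (the smi 1, judging from the case 1 path below): magnitudes 2, 3, 4, 5,
// 8 and 9 come out of a single lea with a suitable scale, plus a neg for
// negative values, avoiding a 10-byte 64-bit immediate move.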
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      NearLabel ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    NearLabel ok;
    j(zero, &ok);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}

void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  if (emit_debug_code()) {
    AbortIfNotSmi(smi1);
    AbortIfNotSmi(smi2);
  }
  cmpq(smi1, smi2);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  Cmp(dst, src);
}


void MacroAssembler::Cmp(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  ASSERT(!dst.AddressUsesRegister(smi_reg));
  cmpq(dst, smi_reg);
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}

void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}

Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckSmi(const Operand& src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}

Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
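  // After the rotate, the sign bit is in bit 0 and the (zero for a smi)
  // tag bit is in bit 1, so one testb against 3 checks both at once.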
  testb(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
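  // Smis have 00 and heap objects 01 in their two lowest bits, so those
  // bits of the sum are zero only if both operands are smis.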
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}

Steve Block8defd9f2010-07-08 12:39:36 +01001123 ASSERT(!src.is(kScratchRegister));
1124 // If we overflow by subtracting one, it's the minimal smi value.
1125 cmpq(src, kSmiConstantRegister);
1126 return overflow;
Steve Blocka7e24c12009-10-30 11:49:00 +00001127}
1128
Steve Blocka7e24c12009-10-30 11:49:00 +00001129
1130Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
Steve Block3ce2e202009-11-05 08:53:23 +00001131 // A 32-bit integer value can always be converted to a smi.
1132 return always;
Steve Blocka7e24c12009-10-30 11:49:00 +00001133}
1134
1135
Steve Block3ce2e202009-11-05 08:53:23 +00001136Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
1137 // An unsigned 32-bit integer value is valid as long as the high bit
1138 // is not set.
Steve Block8defd9f2010-07-08 12:39:36 +01001139 testl(src, src);
1140 return positive;
Steve Block3ce2e202009-11-05 08:53:23 +00001141}
1142
1143
Steve Block1e0659c2011-05-24 12:43:12 +01001144void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
1145 if (dst.is(src)) {
1146 andl(dst, Immediate(kSmiTagMask));
1147 } else {
1148 movl(dst, Immediate(kSmiTagMask));
1149 andl(dst, src);
1150 }
1151}
1152
1153
1154void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
1155 if (!(src.AddressUsesRegister(dst))) {
1156 movl(dst, Immediate(kSmiTagMask));
1157 andl(dst, src);
1158 } else {
1159 movl(dst, src);
1160 andl(dst, Immediate(kSmiTagMask));
1161 }
1162}
1163
1164
void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result; it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}

void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  addq(dst, src2);
  Assert(no_overflow, "Smi addition overflow");
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}

1284void MacroAssembler::SmiNot(Register dst, Register src) {
Steve Block3ce2e202009-11-05 08:53:23 +00001285 ASSERT(!dst.is(kScratchRegister));
1286 ASSERT(!src.is(kScratchRegister));
1287 // Set tag and padding bits before negating, so that they are zero afterwards.
1288 movl(kScratchRegister, Immediate(~0));
Steve Blocka7e24c12009-10-30 11:49:00 +00001289 if (dst.is(src)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001290 xor_(dst, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00001291 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001292 lea(dst, Operand(src, kScratchRegister, times_1, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00001293 }
Steve Block3ce2e202009-11-05 08:53:23 +00001294 not_(dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001295}
1296
1297
1298void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
Steve Block3ce2e202009-11-05 08:53:23 +00001299 ASSERT(!dst.is(src2));
Steve Blocka7e24c12009-10-30 11:49:00 +00001300 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001301 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001302 }
1303 and_(dst, src2);
1304}
1305
1306
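// For the *Constant variants below: when dst aliases src, the constant is
// materialized in a scratch register via GetSmiConstant (hence the ASSERT
// against kScratchRegister); otherwise it is loaded directly into dst and
// combined with src, so no scratch register is needed.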
Steve Block3ce2e202009-11-05 08:53:23 +00001307void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
1308 if (constant->value() == 0) {
Steve Block9fac8402011-05-12 15:51:54 +01001309 Set(dst, 0);
Steve Block3ce2e202009-11-05 08:53:23 +00001310 } else if (dst.is(src)) {
1311 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001312 Register constant_reg = GetSmiConstant(constant);
1313 and_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001314 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001315 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001316 and_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001317 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001318}
1319
1320
1321void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1322 if (!dst.is(src1)) {
    ASSERT(!dst.is(src2));
Steve Block3ce2e202009-11-05 08:53:23 +00001324 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001325 }
1326 or_(dst, src2);
1327}
1328
1329
Steve Block3ce2e202009-11-05 08:53:23 +00001330void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1331 if (dst.is(src)) {
1332 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001333 Register constant_reg = GetSmiConstant(constant);
1334 or_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001335 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001336 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001337 or_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001338 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001339}
1340
Steve Block3ce2e202009-11-05 08:53:23 +00001341
Steve Blocka7e24c12009-10-30 11:49:00 +00001342void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1343 if (!dst.is(src1)) {
    ASSERT(!dst.is(src2));
Steve Block3ce2e202009-11-05 08:53:23 +00001345 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001346 }
1347 xor_(dst, src2);
1348}
1349
1350
Steve Block3ce2e202009-11-05 08:53:23 +00001351void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1352 if (dst.is(src)) {
1353 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001354 Register constant_reg = GetSmiConstant(constant);
1355 xor_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001356 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001357 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001358 xor_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001359 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001360}
1361
1362
Steve Blocka7e24c12009-10-30 11:49:00 +00001363void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
1364 Register src,
1365 int shift_value) {
Steve Block3ce2e202009-11-05 08:53:23 +00001366 ASSERT(is_uint5(shift_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001367 if (shift_value > 0) {
1368 if (dst.is(src)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001369 sar(dst, Immediate(shift_value + kSmiShift));
1370 shl(dst, Immediate(kSmiShift));
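      // The sar untagged and shifted in one step; the shl re-tags by moving
      // the result back into the high half of the word.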
Steve Blocka7e24c12009-10-30 11:49:00 +00001371 } else {
1372 UNIMPLEMENTED(); // Not used.
1373 }
1374 }
1375}
1376
1377
Steve Blocka7e24c12009-10-30 11:49:00 +00001378void MacroAssembler::SmiShiftLeftConstant(Register dst,
1379 Register src,
Kristian Monsen25f61362010-05-21 11:50:48 +01001380 int shift_value) {
Steve Block3ce2e202009-11-05 08:53:23 +00001381 if (!dst.is(src)) {
1382 movq(dst, src);
1383 }
1384 if (shift_value > 0) {
1385 shl(dst, Immediate(shift_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001386 }
1387}
1388
1389
1390void MacroAssembler::SmiShiftLeft(Register dst,
1391 Register src1,
Kristian Monsen25f61362010-05-21 11:50:48 +01001392 Register src2) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001393 ASSERT(!dst.is(rcx));
Steve Block3ce2e202009-11-05 08:53:23 +00001395 // Untag shift amount.
1396 if (!dst.is(src1)) {
1397 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001398 }
Steve Block3ce2e202009-11-05 08:53:23 +00001399 SmiToInteger32(rcx, src2);
  // The shift amount is masked to the lower 5 bits, not the 6 bits the
  // 64-bit shl opcode would otherwise use.
1401 and_(rcx, Immediate(0x1f));
Steve Blockd0582a62009-12-15 09:54:21 +00001402 shl_cl(dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001403}
1404
1405
Steve Blocka7e24c12009-10-30 11:49:00 +00001406void MacroAssembler::SmiShiftArithmeticRight(Register dst,
1407 Register src1,
1408 Register src2) {
Steve Block3ce2e202009-11-05 08:53:23 +00001409 ASSERT(!dst.is(kScratchRegister));
1410 ASSERT(!src1.is(kScratchRegister));
1411 ASSERT(!src2.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001412 ASSERT(!dst.is(rcx));
Steve Block3ce2e202009-11-05 08:53:23 +00001413 if (src1.is(rcx)) {
1414 movq(kScratchRegister, src1);
1415 } else if (src2.is(rcx)) {
1416 movq(kScratchRegister, src2);
1417 }
1418 if (!dst.is(src1)) {
1419 movq(dst, src1);
1420 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001421 SmiToInteger32(rcx, src2);
Steve Block3ce2e202009-11-05 08:53:23 +00001422 orl(rcx, Immediate(kSmiShift));
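  // After or-ing in kSmiShift (32), the hardware's six-bit count mask makes
  // sar shift by 32 + (count & 31): this untags, applies the JS-masked
  // shift, and rounds toward negative infinity in a single instruction.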
Steve Blockd0582a62009-12-15 09:54:21 +00001423 sar_cl(dst); // Shift 32 + original rcx & 0x1f.
Steve Block3ce2e202009-11-05 08:53:23 +00001424 shl(dst, Immediate(kSmiShift));
1425 if (src1.is(rcx)) {
1426 movq(src1, kScratchRegister);
1427 } else if (src2.is(rcx)) {
1428 movq(src2, kScratchRegister);
1429 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001430}
1431
1432
Steve Block3ce2e202009-11-05 08:53:23 +00001433SmiIndex MacroAssembler::SmiToIndex(Register dst,
1434 Register src,
1435 int shift) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001436 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001437 // There is a possible optimization if shift is in the range 60-63, but that
1438 // will (and must) never happen.
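  // A smi is its value shifted left by kSmiShift (32), so value << shift
  // falls out of a single arithmetic shift right by kSmiShift - shift, which
  // also preserves the sign of negative smis. The result is usable as, e.g.,
  // Operand(base, index.reg, index.scale, 0).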
1439 if (!dst.is(src)) {
1440 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001441 }
Steve Block3ce2e202009-11-05 08:53:23 +00001442 if (shift < kSmiShift) {
1443 sar(dst, Immediate(kSmiShift - shift));
1444 } else {
1445 shl(dst, Immediate(shift - kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001446 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001447 return SmiIndex(dst, times_1);
1448}
1449
Steve Blocka7e24c12009-10-30 11:49:00 +00001450SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1451 Register src,
1452 int shift) {
1453 // Register src holds a positive smi.
1454 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001455 if (!dst.is(src)) {
1456 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001457 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001458 neg(dst);
Steve Block3ce2e202009-11-05 08:53:23 +00001459 if (shift < kSmiShift) {
1460 sar(dst, Immediate(kSmiShift - shift));
1461 } else {
1462 shl(dst, Immediate(shift - kSmiShift));
1463 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001464 return SmiIndex(dst, times_1);
1465}
1466
1467
Steve Block44f0eee2011-05-26 01:26:41 +01001468void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
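  // Adds the 32-bit integer stored in the high half of a smi field directly
  // to dst, without untagging the smi first.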
1469 ASSERT_EQ(0, kSmiShift % kBitsPerByte);
1470 addl(dst, Operand(src, kSmiShift / kBitsPerByte));
1471}
1472
1473
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001475void MacroAssembler::Move(Register dst, Register src) {
1476 if (!dst.is(src)) {
1477 movq(dst, src);
Steve Block6ded16b2010-05-10 14:33:55 +01001478 }
Steve Block6ded16b2010-05-10 14:33:55 +01001479}
1480
1481
Steve Blocka7e24c12009-10-30 11:49:00 +00001482void MacroAssembler::Move(Register dst, Handle<Object> source) {
1483 ASSERT(!source->IsFailure());
1484 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001485 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001486 } else {
1487 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1488 }
1489}
1490
1491
1492void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001493 ASSERT(!source->IsFailure());
Steve Blocka7e24c12009-10-30 11:49:00 +00001494 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001495 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001496 } else {
1497 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1498 movq(dst, kScratchRegister);
1499 }
1500}
1501
1502
1503void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001504 if (source->IsSmi()) {
Steve Block44f0eee2011-05-26 01:26:41 +01001505 Cmp(dst, Smi::cast(*source));
Steve Block3ce2e202009-11-05 08:53:23 +00001506 } else {
1507 Move(kScratchRegister, source);
1508 cmpq(dst, kScratchRegister);
1509 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001510}
1511
1512
1513void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1514 if (source->IsSmi()) {
Steve Block44f0eee2011-05-26 01:26:41 +01001515 Cmp(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001516 } else {
1517 ASSERT(source->IsHeapObject());
1518 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1519 cmpq(dst, kScratchRegister);
1520 }
1521}
1522
1523
1524void MacroAssembler::Push(Handle<Object> source) {
1525 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001526 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001527 } else {
1528 ASSERT(source->IsHeapObject());
1529 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1530 push(kScratchRegister);
1531 }
1532}
1533
1534
1535void MacroAssembler::Push(Smi* source) {
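  // A smi whose tagged 64-bit pattern fits in a sign-extended 32-bit
  // immediate can be pushed directly; anything larger goes through a
  // register.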
Steve Block3ce2e202009-11-05 08:53:23 +00001536 intptr_t smi = reinterpret_cast<intptr_t>(source);
1537 if (is_int32(smi)) {
1538 push(Immediate(static_cast<int32_t>(smi)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001539 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001540 Register constant = GetSmiConstant(source);
1541 push(constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001542 }
1543}
1544
1545
Leon Clarkee46be812010-01-19 14:06:41 +00001546void MacroAssembler::Drop(int stack_elements) {
1547 if (stack_elements > 0) {
1548 addq(rsp, Immediate(stack_elements * kPointerSize));
1549 }
1550}
1551
1552
Steve Block3ce2e202009-11-05 08:53:23 +00001553void MacroAssembler::Test(const Operand& src, Smi* source) {
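  // A smi's value occupies the high 32 bits of the word, so it is enough to
  // test that half; the kIntSize byte offset skips the low (tag) word.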
Leon Clarkef7060e22010-06-03 12:02:55 +01001554 testl(Operand(src, kIntSize), Immediate(source->value()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001555}
1556
1557
1558void MacroAssembler::Jump(ExternalReference ext) {
Steve Block44f0eee2011-05-26 01:26:41 +01001559 LoadAddress(kScratchRegister, ext);
Steve Blocka7e24c12009-10-30 11:49:00 +00001560 jmp(kScratchRegister);
1561}
1562
1563
1564void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1565 movq(kScratchRegister, destination, rmode);
1566 jmp(kScratchRegister);
1567}
1568
1569
1570void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
Steve Block3ce2e202009-11-05 08:53:23 +00001571 // TODO(X64): Inline this
1572 jmp(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001573}
1574
1575
Steve Block44f0eee2011-05-26 01:26:41 +01001576int MacroAssembler::CallSize(ExternalReference ext) {
1577 // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
1578 const int kCallInstructionSize = 3;
1579 return LoadAddressSize(ext) + kCallInstructionSize;
1580}
1581
1582
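// The DEBUG bracketing in the Call variants below checks that CallSize
// agrees with the code actually emitted; callers such as the call wrappers
// rely on pre-computed call sizes.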
Steve Blocka7e24c12009-10-30 11:49:00 +00001583void MacroAssembler::Call(ExternalReference ext) {
Steve Block44f0eee2011-05-26 01:26:41 +01001584#ifdef DEBUG
1585 int end_position = pc_offset() + CallSize(ext);
1586#endif
1587 LoadAddress(kScratchRegister, ext);
Steve Blocka7e24c12009-10-30 11:49:00 +00001588 call(kScratchRegister);
Steve Block44f0eee2011-05-26 01:26:41 +01001589#ifdef DEBUG
1590 CHECK_EQ(end_position, pc_offset());
1591#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001592}
1593
1594
1595void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
Steve Block44f0eee2011-05-26 01:26:41 +01001596#ifdef DEBUG
1597 int end_position = pc_offset() + CallSize(destination, rmode);
1598#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001599 movq(kScratchRegister, destination, rmode);
1600 call(kScratchRegister);
Steve Block44f0eee2011-05-26 01:26:41 +01001601#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
1603#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001604}
1605
1606
1607void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
Steve Block44f0eee2011-05-26 01:26:41 +01001608#ifdef DEBUG
1609 int end_position = pc_offset() + CallSize(code_object);
1610#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001611 ASSERT(RelocInfo::IsCodeTarget(rmode));
Steve Block3ce2e202009-11-05 08:53:23 +00001612 call(code_object, rmode);
Steve Block44f0eee2011-05-26 01:26:41 +01001613#ifdef DEBUG
1614 CHECK_EQ(end_position, pc_offset());
1615#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001616}
1617
1618
Steve Block1e0659c2011-05-24 12:43:12 +01001619void MacroAssembler::Pushad() {
1620 push(rax);
1621 push(rcx);
1622 push(rdx);
1623 push(rbx);
1624 // Not pushing rsp or rbp.
1625 push(rsi);
1626 push(rdi);
1627 push(r8);
1628 push(r9);
1629 // r10 is kScratchRegister.
1630 push(r11);
Steve Block44f0eee2011-05-26 01:26:41 +01001631 // r12 is kSmiConstantRegister.
Steve Block1e0659c2011-05-24 12:43:12 +01001632 // r13 is kRootRegister.
1633 push(r14);
Steve Block44f0eee2011-05-26 01:26:41 +01001634 push(r15);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001635 STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
1636 // Use lea for symmetry with Popad.
1637 int sp_delta =
1638 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
1639 lea(rsp, Operand(rsp, -sp_delta));
Steve Block1e0659c2011-05-24 12:43:12 +01001640}
1641
1642
1643void MacroAssembler::Popad() {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001644 // Popad must not change the flags, so use lea instead of addq.
1645 int sp_delta =
1646 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
1647 lea(rsp, Operand(rsp, sp_delta));
Steve Block44f0eee2011-05-26 01:26:41 +01001648 pop(r15);
Steve Block1e0659c2011-05-24 12:43:12 +01001649 pop(r14);
Steve Block1e0659c2011-05-24 12:43:12 +01001650 pop(r11);
1651 pop(r9);
1652 pop(r8);
1653 pop(rdi);
1654 pop(rsi);
1655 pop(rbx);
1656 pop(rdx);
1657 pop(rcx);
1658 pop(rax);
1659}
1660
1661
1662void MacroAssembler::Dropad() {
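  // Discards the whole safepoint register area in one stack adjustment;
  // unlike Popad, this restores nothing and may clobber the flags.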
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001663 addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
Steve Block1e0659c2011-05-24 12:43:12 +01001664}
1665
1666
// Order in which general registers are pushed by Pushad:
Steve Block44f0eee2011-05-26 01:26:41 +01001668// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
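// The array is indexed by register code and gives each register's slot in
// the Pushad layout, with -1 for the registers Pushad does not save
// (rsp, rbp, r10/kScratchRegister, r12/kSmiConstantRegister, and
// r13/kRootRegister).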
Steve Block1e0659c2011-05-24 12:43:12 +01001669int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
1670 0,
1671 1,
1672 2,
1673 3,
1674 -1,
1675 -1,
1676 4,
1677 5,
1678 6,
1679 7,
1680 -1,
1681 8,
Steve Block1e0659c2011-05-24 12:43:12 +01001682 -1,
Steve Block44f0eee2011-05-26 01:26:41 +01001683 -1,
1684 9,
1685 10
Steve Block1e0659c2011-05-24 12:43:12 +01001686};
1687
1688
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001689void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
1690 movq(SafepointRegisterSlot(dst), src);
1691}
1692
1693
1694void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
1695 movq(dst, SafepointRegisterSlot(src));
1696}
1697
1698
1699Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
1700 return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
1701}
1702
1703
Steve Blocka7e24c12009-10-30 11:49:00 +00001704void MacroAssembler::PushTryHandler(CodeLocation try_location,
1705 HandlerType type) {
1706 // Adjust this code if not the case.
1707 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1708
1709 // The pc (return address) is already on TOS. This code pushes state,
1710 // frame pointer and current handler. Check that they are expected
1711 // next on the stack, in that order.
1712 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1713 StackHandlerConstants::kPCOffset - kPointerSize);
1714 ASSERT_EQ(StackHandlerConstants::kFPOffset,
1715 StackHandlerConstants::kStateOffset - kPointerSize);
1716 ASSERT_EQ(StackHandlerConstants::kNextOffset,
1717 StackHandlerConstants::kFPOffset - kPointerSize);
1718
1719 if (try_location == IN_JAVASCRIPT) {
1720 if (type == TRY_CATCH_HANDLER) {
1721 push(Immediate(StackHandler::TRY_CATCH));
1722 } else {
1723 push(Immediate(StackHandler::TRY_FINALLY));
1724 }
1725 push(rbp);
1726 } else {
1727 ASSERT(try_location == IN_JS_ENTRY);
1728 // The frame pointer does not point to a JS frame so we save NULL
1729 // for rbp. We expect the code throwing an exception to check rbp
1730 // before dereferencing it to restore the context.
1731 push(Immediate(StackHandler::ENTRY));
1732 push(Immediate(0)); // NULL frame pointer.
1733 }
1734 // Save the current handler.
Steve Block44f0eee2011-05-26 01:26:41 +01001735 Operand handler_operand =
1736 ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
1737 push(handler_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00001738 // Link this handler.
Steve Block44f0eee2011-05-26 01:26:41 +01001739 movq(handler_operand, rsp);
Steve Blocka7e24c12009-10-30 11:49:00 +00001740}
1741
1742
Leon Clarkee46be812010-01-19 14:06:41 +00001743void MacroAssembler::PopTryHandler() {
1744 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1745 // Unlink this handler.
Steve Block44f0eee2011-05-26 01:26:41 +01001746 Operand handler_operand =
1747 ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
1748 pop(handler_operand);
Leon Clarkee46be812010-01-19 14:06:41 +00001749 // Remove the remaining fields.
1750 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1751}
1752
1753
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001754void MacroAssembler::Throw(Register value) {
  // Check that the stack contains the next handler, frame pointer, state,
  // and return address, in that order.
1757 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
1758 StackHandlerConstants::kStateOffset);
1759 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
1760 StackHandlerConstants::kPCOffset);
1761 // Keep thrown value in rax.
1762 if (!value.is(rax)) {
1763 movq(rax, value);
1764 }
1765
Steve Block44f0eee2011-05-26 01:26:41 +01001766 ExternalReference handler_address(Isolate::k_handler_address, isolate());
1767 Operand handler_operand = ExternalOperand(handler_address);
1768 movq(rsp, handler_operand);
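  // rsp now points at the topmost stack handler, so the pops below can
  // unlink it and restore the saved registers in handler-layout order.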
  // Unlink this handler: the next handler becomes the current one.
  pop(handler_operand);
  pop(rbp);  // Restore the frame pointer.
  pop(rdx);  // Remove the state.
1773
1774 // Before returning we restore the context from the frame pointer if not NULL.
1775 // The frame pointer is NULL in the exception handler of a JS entry frame.
1776 Set(rsi, 0); // Tentatively set context pointer to NULL
1777 NearLabel skip;
1778 cmpq(rbp, Immediate(0));
1779 j(equal, &skip);
1780 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1781 bind(&skip);
1782 ret(0);
1783}
1784
1785
1786void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
1787 Register value) {
1788 // Keep thrown value in rax.
1789 if (!value.is(rax)) {
1790 movq(rax, value);
1791 }
1792 // Fetch top stack handler.
Steve Block44f0eee2011-05-26 01:26:41 +01001793 ExternalReference handler_address(Isolate::k_handler_address, isolate());
1794 Load(rsp, handler_address);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001795
1796 // Unwind the handlers until the ENTRY handler is found.
1797 NearLabel loop, done;
1798 bind(&loop);
1799 // Load the type of the current stack handler.
1800 const int kStateOffset = StackHandlerConstants::kStateOffset;
1801 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
1802 j(equal, &done);
1803 // Fetch the next handler in the list.
1804 const int kNextOffset = StackHandlerConstants::kNextOffset;
1805 movq(rsp, Operand(rsp, kNextOffset));
1806 jmp(&loop);
1807 bind(&done);
1808
1809 // Set the top handler address to next handler past the current ENTRY handler.
Steve Block44f0eee2011-05-26 01:26:41 +01001810 Operand handler_operand = ExternalOperand(handler_address);
1811 pop(handler_operand);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001812
1813 if (type == OUT_OF_MEMORY) {
1814 // Set external caught exception to false.
Steve Block44f0eee2011-05-26 01:26:41 +01001815 ExternalReference external_caught(
1816 Isolate::k_external_caught_exception_address, isolate());
Ben Murdoch8b112d22011-06-08 16:22:53 +01001817 Set(rax, static_cast<int64_t>(false));
Steve Block44f0eee2011-05-26 01:26:41 +01001818 Store(external_caught, rax);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001819
1820 // Set pending exception and rax to out of memory exception.
Steve Block44f0eee2011-05-26 01:26:41 +01001821 ExternalReference pending_exception(Isolate::k_pending_exception_address,
1822 isolate());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001823 movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
Steve Block44f0eee2011-05-26 01:26:41 +01001824 Store(pending_exception, rax);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001825 }
1826
1827 // Clear the context pointer.
1828 Set(rsi, 0);
1829
1830 // Restore registers from handler.
1831 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
Steve Block44f0eee2011-05-26 01:26:41 +01001832 StackHandlerConstants::kFPOffset);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001833 pop(rbp); // FP
1834 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
Steve Block44f0eee2011-05-26 01:26:41 +01001835 StackHandlerConstants::kStateOffset);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001836 pop(rdx); // State
1837
1838 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
Steve Block44f0eee2011-05-26 01:26:41 +01001839 StackHandlerConstants::kPCOffset);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001840 ret(0);
1841}
1842
1843
Steve Blocka7e24c12009-10-30 11:49:00 +00001844void MacroAssembler::Ret() {
1845 ret(0);
1846}
1847
1848
Steve Block1e0659c2011-05-24 12:43:12 +01001849void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
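  // The ret instruction only encodes a 16-bit immediate, so larger byte
  // counts move the return address past the dropped bytes and use ret(0).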
1850 if (is_uint16(bytes_dropped)) {
1851 ret(bytes_dropped);
1852 } else {
1853 pop(scratch);
1854 addq(rsp, Immediate(bytes_dropped));
1855 push(scratch);
1856 ret(0);
1857 }
1858}
1859
1860
Steve Blocka7e24c12009-10-30 11:49:00 +00001861void MacroAssembler::FCmp() {
Steve Block3ce2e202009-11-05 08:53:23 +00001862 fucomip();
Steve Block8defd9f2010-07-08 12:39:36 +01001863 fstp(0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001864}
1865
1866
1867void MacroAssembler::CmpObjectType(Register heap_object,
1868 InstanceType type,
1869 Register map) {
1870 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1871 CmpInstanceType(map, type);
1872}
1873
1874
1875void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1876 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1877 Immediate(static_cast<int8_t>(type)));
1878}
1879
1880
Andrei Popescu31002712010-02-23 13:46:05 +00001881void MacroAssembler::CheckMap(Register obj,
1882 Handle<Map> map,
1883 Label* fail,
1884 bool is_heap_object) {
1885 if (!is_heap_object) {
1886 JumpIfSmi(obj, fail);
1887 }
1888 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1889 j(not_equal, fail);
1890}
1891
1892
Leon Clarkef7060e22010-06-03 12:02:55 +01001893void MacroAssembler::AbortIfNotNumber(Register object) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001894 NearLabel ok;
Andrei Popescu402d9372010-02-26 13:31:12 +00001895 Condition is_smi = CheckSmi(object);
1896 j(is_smi, &ok);
1897 Cmp(FieldOperand(object, HeapObject::kMapOffset),
Steve Block053d10c2011-06-13 19:13:29 +01001898 FACTORY->heap_number_map());
Leon Clarkef7060e22010-06-03 12:02:55 +01001899 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +00001900 bind(&ok);
1901}
1902
1903
Iain Merrick75681382010-08-19 15:07:18 +01001904void MacroAssembler::AbortIfSmi(Register object) {
Iain Merrick75681382010-08-19 15:07:18 +01001906 Condition is_smi = CheckSmi(object);
1907 Assert(NegateCondition(is_smi), "Operand is a smi");
1908}
1909
1910
Leon Clarkef7060e22010-06-03 12:02:55 +01001911void MacroAssembler::AbortIfNotSmi(Register object) {
Steve Block44f0eee2011-05-26 01:26:41 +01001912 Condition is_smi = CheckSmi(object);
1913 Assert(is_smi, "Operand is not a smi");
1914}
1915
1916
1917void MacroAssembler::AbortIfNotSmi(const Operand& object) {
Steve Block6ded16b2010-05-10 14:33:55 +01001918 Condition is_smi = CheckSmi(object);
Iain Merrick75681382010-08-19 15:07:18 +01001919 Assert(is_smi, "Operand is not a smi");
Steve Block6ded16b2010-05-10 14:33:55 +01001920}
1921
1922
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001923void MacroAssembler::AbortIfNotString(Register object) {
1924 testb(object, Immediate(kSmiTagMask));
1925 Assert(not_equal, "Operand is not a string");
1926 push(object);
1927 movq(object, FieldOperand(object, HeapObject::kMapOffset));
1928 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
1929 pop(object);
1930 Assert(below, "Operand is not a string");
1931}
1932
1933
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001934void MacroAssembler::AbortIfNotRootValue(Register src,
1935 Heap::RootListIndex root_value_index,
1936 const char* message) {
1937 ASSERT(!src.is(kScratchRegister));
1938 LoadRoot(kScratchRegister, root_value_index);
1939 cmpq(src, kScratchRegister);
1940 Check(equal, message);
1941}
1942
1943
Leon Clarked91b9f72010-01-27 17:25:45 +00001945Condition MacroAssembler::IsObjectStringType(Register heap_object,
1946 Register map,
1947 Register instance_type) {
1948 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00001949 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00001950 ASSERT(kNotStringTag != 0);
1951 testb(instance_type, Immediate(kIsNotStringMask));
1952 return zero;
1953}
1954
1955
Steve Blocka7e24c12009-10-30 11:49:00 +00001956void MacroAssembler::TryGetFunctionPrototype(Register function,
1957 Register result,
1958 Label* miss) {
  // Check that the function isn't a smi.
1960 testl(function, Immediate(kSmiTagMask));
1961 j(zero, miss);
1962
1963 // Check that the function really is a function.
1964 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1965 j(not_equal, miss);
1966
1967 // Make sure that the function has an instance prototype.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001968 NearLabel non_instance;
Steve Blocka7e24c12009-10-30 11:49:00 +00001969 testb(FieldOperand(result, Map::kBitFieldOffset),
1970 Immediate(1 << Map::kHasNonInstancePrototype));
1971 j(not_zero, &non_instance);
1972
1973 // Get the prototype or initial map from the function.
1974 movq(result,
1975 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1976
1977 // If the prototype or initial map is the hole, don't return it and
1978 // simply miss the cache instead. This will allow us to allocate a
1979 // prototype object on-demand in the runtime system.
1980 CompareRoot(result, Heap::kTheHoleValueRootIndex);
1981 j(equal, miss);
1982
1983 // If the function does not have an initial map, we're done.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001984 NearLabel done;
Steve Blocka7e24c12009-10-30 11:49:00 +00001985 CmpObjectType(result, MAP_TYPE, kScratchRegister);
1986 j(not_equal, &done);
1987
1988 // Get the prototype from the initial map.
1989 movq(result, FieldOperand(result, Map::kPrototypeOffset));
1990 jmp(&done);
1991
1992 // Non-instance prototype: Fetch prototype from constructor field
1993 // in initial map.
1994 bind(&non_instance);
1995 movq(result, FieldOperand(result, Map::kConstructorOffset));
1996
1997 // All done.
1998 bind(&done);
1999}
2000
2001
2002void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2003 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002004 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Ben Murdoch8b112d22011-06-08 16:22:53 +01002005 movl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002006 }
2007}
2008
2009
2010void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2011 ASSERT(value > 0);
2012 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002013 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00002014 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01002015 incl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002016 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002017 addl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002018 }
2019 }
2020}
2021
2022
2023void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2024 ASSERT(value > 0);
2025 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002026 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00002027 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01002028 decl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002029 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002030 subl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002031 }
2032 }
2033}
2034
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002035
Steve Blocka7e24c12009-10-30 11:49:00 +00002036#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +00002037void MacroAssembler::DebugBreak() {
2038 ASSERT(allow_stub_calls());
Steve Block9fac8402011-05-12 15:51:54 +01002039 Set(rax, 0); // No arguments.
Steve Block44f0eee2011-05-26 01:26:41 +01002040 LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
Andrei Popescu402d9372010-02-26 13:31:12 +00002041 CEntryStub ces(1);
2042 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Steve Blocka7e24c12009-10-30 11:49:00 +00002043}
Andrei Popescu402d9372010-02-26 13:31:12 +00002044#endif // ENABLE_DEBUGGER_SUPPORT
Steve Blocka7e24c12009-10-30 11:49:00 +00002045
2046
Steve Blocka7e24c12009-10-30 11:49:00 +00002047void MacroAssembler::InvokeCode(Register code,
2048 const ParameterCount& expected,
2049 const ParameterCount& actual,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002050 InvokeFlag flag,
Steve Block44f0eee2011-05-26 01:26:41 +01002051 CallWrapper* call_wrapper) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002052 NearLabel done;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002053 InvokePrologue(expected,
2054 actual,
2055 Handle<Code>::null(),
2056 code,
2057 &done,
2058 flag,
Steve Block44f0eee2011-05-26 01:26:41 +01002059 call_wrapper);
Steve Blocka7e24c12009-10-30 11:49:00 +00002060 if (flag == CALL_FUNCTION) {
Steve Block44f0eee2011-05-26 01:26:41 +01002061 if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code));
Steve Blocka7e24c12009-10-30 11:49:00 +00002062 call(code);
Steve Block44f0eee2011-05-26 01:26:41 +01002063 if (call_wrapper != NULL) call_wrapper->AfterCall();
Steve Blocka7e24c12009-10-30 11:49:00 +00002064 } else {
2065 ASSERT(flag == JUMP_FUNCTION);
2066 jmp(code);
2067 }
2068 bind(&done);
2069}
2070
2071
2072void MacroAssembler::InvokeCode(Handle<Code> code,
2073 const ParameterCount& expected,
2074 const ParameterCount& actual,
2075 RelocInfo::Mode rmode,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002076 InvokeFlag flag,
Steve Block44f0eee2011-05-26 01:26:41 +01002077 CallWrapper* call_wrapper) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002078 NearLabel done;
Steve Blocka7e24c12009-10-30 11:49:00 +00002079 Register dummy = rax;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002080 InvokePrologue(expected,
2081 actual,
2082 code,
2083 dummy,
2084 &done,
2085 flag,
Steve Block44f0eee2011-05-26 01:26:41 +01002086 call_wrapper);
Steve Blocka7e24c12009-10-30 11:49:00 +00002087 if (flag == CALL_FUNCTION) {
Steve Block44f0eee2011-05-26 01:26:41 +01002088 if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code));
Steve Blocka7e24c12009-10-30 11:49:00 +00002089 Call(code, rmode);
Steve Block44f0eee2011-05-26 01:26:41 +01002090 if (call_wrapper != NULL) call_wrapper->AfterCall();
Steve Blocka7e24c12009-10-30 11:49:00 +00002091 } else {
2092 ASSERT(flag == JUMP_FUNCTION);
2093 Jump(code, rmode);
2094 }
2095 bind(&done);
2096}
2097
2098
2099void MacroAssembler::InvokeFunction(Register function,
2100 const ParameterCount& actual,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002101 InvokeFlag flag,
Steve Block44f0eee2011-05-26 01:26:41 +01002102 CallWrapper* call_wrapper) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002103 ASSERT(function.is(rdi));
2104 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2105 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
2106 movsxlq(rbx,
2107 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  // Load the code entry point into rdx: the end of the Code object header,
  // which is the start of the executable code.
Steve Block791712a2010-08-27 10:21:07 +01002110 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002111
2112 ParameterCount expected(rbx);
Steve Block44f0eee2011-05-26 01:26:41 +01002113 InvokeCode(rdx, expected, actual, flag, call_wrapper);
Steve Blocka7e24c12009-10-30 11:49:00 +00002114}
2115
2116
Andrei Popescu402d9372010-02-26 13:31:12 +00002117void MacroAssembler::InvokeFunction(JSFunction* function,
2118 const ParameterCount& actual,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002119 InvokeFlag flag,
Steve Block44f0eee2011-05-26 01:26:41 +01002120 CallWrapper* call_wrapper) {
Andrei Popescu402d9372010-02-26 13:31:12 +00002121 ASSERT(function->is_compiled());
2122 // Get the function and setup the context.
2123 Move(rdi, Handle<JSFunction>(function));
2124 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2125
Steve Block1e0659c2011-05-24 12:43:12 +01002126 if (V8::UseCrankshaft()) {
2127 // Since Crankshaft can recompile a function, we need to load
2128 // the Code object every time we call the function.
2129 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2130 ParameterCount expected(function->shared()->formal_parameter_count());
Steve Block44f0eee2011-05-26 01:26:41 +01002131 InvokeCode(rdx, expected, actual, flag, call_wrapper);
Steve Block1e0659c2011-05-24 12:43:12 +01002132 } else {
2133 // Invoke the cached code.
2134 Handle<Code> code(function->code());
2135 ParameterCount expected(function->shared()->formal_parameter_count());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002136 InvokeCode(code,
2137 expected,
2138 actual,
2139 RelocInfo::CODE_TARGET,
2140 flag,
Steve Block44f0eee2011-05-26 01:26:41 +01002141 call_wrapper);
Steve Block1e0659c2011-05-24 12:43:12 +01002142 }
Andrei Popescu402d9372010-02-26 13:31:12 +00002143}
2144
2145
Steve Blocka7e24c12009-10-30 11:49:00 +00002146void MacroAssembler::EnterFrame(StackFrame::Type type) {
2147 push(rbp);
2148 movq(rbp, rsp);
2149 push(rsi); // Context.
Steve Block3ce2e202009-11-05 08:53:23 +00002150 Push(Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002151 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2152 push(kScratchRegister);
Steve Block44f0eee2011-05-26 01:26:41 +01002153 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002154 movq(kScratchRegister,
Steve Block053d10c2011-06-13 19:13:29 +01002155 FACTORY->undefined_value(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002156 RelocInfo::EMBEDDED_OBJECT);
2157 cmpq(Operand(rsp, 0), kScratchRegister);
2158 Check(not_equal, "code object not properly patched");
2159 }
2160}
2161
2162
2163void MacroAssembler::LeaveFrame(StackFrame::Type type) {
Steve Block44f0eee2011-05-26 01:26:41 +01002164 if (emit_debug_code()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002165 Move(kScratchRegister, Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002166 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2167 Check(equal, "stack frame types must match");
2168 }
2169 movq(rsp, rbp);
2170 pop(rbp);
2171}
2172
2173
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002174void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
2176 // All constants are relative to the frame pointer of the exit frame.
2177 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2178 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2179 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2180 push(rbp);
2181 movq(rbp, rsp);
2182
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002183 // Reserve room for entry stack pointer and push the code object.
Steve Block3ce2e202009-11-05 08:53:23 +00002184 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +00002185 push(Immediate(0)); // Saved entry sp, patched before call.
2186 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +00002188
2189 // Save the frame pointer and the context in top.
Ben Murdochbb769b22010-08-11 14:56:33 +01002190 if (save_rax) {
Steve Block44f0eee2011-05-26 01:26:41 +01002191 movq(r14, rax); // Backup rax in callee-save register.
Ben Murdochbb769b22010-08-11 14:56:33 +01002192 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002193
Steve Block44f0eee2011-05-26 01:26:41 +01002194 Store(ExternalReference(Isolate::k_c_entry_fp_address, isolate()), rbp);
2195 Store(ExternalReference(Isolate::k_context_address, isolate()), rsi);
Ben Murdochbb769b22010-08-11 14:56:33 +01002196}
Steve Blocka7e24c12009-10-30 11:49:00 +00002197
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002198
Steve Block1e0659c2011-05-24 12:43:12 +01002199void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
2200 bool save_doubles) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002201#ifdef _WIN64
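  // The Windows x64 calling convention requires the caller to reserve four
  // pointer-sized "shadow" slots for the callee's register parameters.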
Steve Block1e0659c2011-05-24 12:43:12 +01002202 const int kShadowSpace = 4;
2203 arg_stack_space += kShadowSpace;
Steve Blocka7e24c12009-10-30 11:49:00 +00002204#endif
Steve Block1e0659c2011-05-24 12:43:12 +01002205 // Optionally save all XMM registers.
2206 if (save_doubles) {
Steve Block1e0659c2011-05-24 12:43:12 +01002207 int space = XMMRegister::kNumRegisters * kDoubleSize +
2208 arg_stack_space * kPointerSize;
2209 subq(rsp, Immediate(space));
2210 int offset = -2 * kPointerSize;
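    // The two slots below rbp already hold the saved entry sp and the code
    // object (pushed in the prologue); the XMM registers go below those.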
2211 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
2212 XMMRegister reg = XMMRegister::FromAllocationIndex(i);
2213 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
2214 }
2215 } else if (arg_stack_space > 0) {
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002216 subq(rsp, Immediate(arg_stack_space * kPointerSize));
2217 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002218
2219 // Get the required frame alignment for the OS.
Steve Block44f0eee2011-05-26 01:26:41 +01002220 const int kFrameAlignment = OS::ActivationFrameAlignment();
Steve Blocka7e24c12009-10-30 11:49:00 +00002221 if (kFrameAlignment > 0) {
2222 ASSERT(IsPowerOf2(kFrameAlignment));
Ben Murdoch8b112d22011-06-08 16:22:53 +01002223 ASSERT(is_int8(kFrameAlignment));
2224 and_(rsp, Immediate(-kFrameAlignment));
Steve Blocka7e24c12009-10-30 11:49:00 +00002225 }
2226
2227 // Patch the saved entry sp.
2228 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2229}
2230
2231
Steve Block1e0659c2011-05-24 12:43:12 +01002232void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002233 EnterExitFramePrologue(true);
Ben Murdochbb769b22010-08-11 14:56:33 +01002234
  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
Ben Murdochbb769b22010-08-11 14:56:33 +01002236 // so it must be retained across the C-call.
2237 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
Steve Block44f0eee2011-05-26 01:26:41 +01002238 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
Ben Murdochbb769b22010-08-11 14:56:33 +01002239
Steve Block1e0659c2011-05-24 12:43:12 +01002240 EnterExitFrameEpilogue(arg_stack_space, save_doubles);
Ben Murdochbb769b22010-08-11 14:56:33 +01002241}
2242
2243
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002244void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002245 EnterExitFramePrologue(false);
Steve Block1e0659c2011-05-24 12:43:12 +01002246 EnterExitFrameEpilogue(arg_stack_space, false);
Ben Murdochbb769b22010-08-11 14:56:33 +01002247}
2248
2249
Steve Block1e0659c2011-05-24 12:43:12 +01002250void MacroAssembler::LeaveExitFrame(bool save_doubles) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002251 // Registers:
Steve Block44f0eee2011-05-26 01:26:41 +01002252 // r15 : argv
Steve Block1e0659c2011-05-24 12:43:12 +01002253 if (save_doubles) {
2254 int offset = -2 * kPointerSize;
2255 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
2256 XMMRegister reg = XMMRegister::FromAllocationIndex(i);
2257 movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
2258 }
2259 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002260 // Get the return address from the stack and restore the frame pointer.
2261 movq(rcx, Operand(rbp, 1 * kPointerSize));
2262 movq(rbp, Operand(rbp, 0 * kPointerSize));
2263
Steve Block1e0659c2011-05-24 12:43:12 +01002264 // Drop everything up to and including the arguments and the receiver
Steve Blocka7e24c12009-10-30 11:49:00 +00002265 // from the caller stack.
Steve Block44f0eee2011-05-26 01:26:41 +01002266 lea(rsp, Operand(r15, 1 * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00002267
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002268 // Push the return address to get ready to return.
2269 push(rcx);
2270
2271 LeaveExitFrameEpilogue();
2272}
2273
2274
2275void MacroAssembler::LeaveApiExitFrame() {
2276 movq(rsp, rbp);
2277 pop(rbp);
2278
2279 LeaveExitFrameEpilogue();
2280}
2281
2282
2283void MacroAssembler::LeaveExitFrameEpilogue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00002284 // Restore current context from top and clear it in debug mode.
Steve Block44f0eee2011-05-26 01:26:41 +01002285 ExternalReference context_address(Isolate::k_context_address, isolate());
2286 Operand context_operand = ExternalOperand(context_address);
2287 movq(rsi, context_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002288#ifdef DEBUG
Steve Block44f0eee2011-05-26 01:26:41 +01002289 movq(context_operand, Immediate(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002290#endif
2291
Steve Blocka7e24c12009-10-30 11:49:00 +00002292 // Clear the top frame.
Steve Block44f0eee2011-05-26 01:26:41 +01002293 ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
2294 isolate());
2295 Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
2296 movq(c_entry_fp_operand, Immediate(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002297}
2298
2299
Steve Blocka7e24c12009-10-30 11:49:00 +00002300void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2301 Register scratch,
2302 Label* miss) {
2303 Label same_contexts;
2304
2305 ASSERT(!holder_reg.is(scratch));
2306 ASSERT(!scratch.is(kScratchRegister));
2307 // Load current lexical context from the stack frame.
2308 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2309
2310 // When generating debug code, make sure the lexical context is set.
Steve Block44f0eee2011-05-26 01:26:41 +01002311 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002312 cmpq(scratch, Immediate(0));
2313 Check(not_equal, "we should not have an empty lexical context");
2314 }
2315 // Load the global context of the current context.
2316 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2317 movq(scratch, FieldOperand(scratch, offset));
2318 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2319
2320 // Check the context is a global context.
Steve Block44f0eee2011-05-26 01:26:41 +01002321 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002322 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
Steve Block053d10c2011-06-13 19:13:29 +01002323 FACTORY->global_context_map());
Steve Blocka7e24c12009-10-30 11:49:00 +00002324 Check(equal, "JSGlobalObject::global_context should be a global context.");
2325 }
2326
2327 // Check if both contexts are the same.
2328 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2329 j(equal, &same_contexts);
2330
2331 // Compare security tokens.
2332 // Check that the security token in the calling global object is
2333 // compatible with the security token in the receiving global
2334 // object.
2335
2336 // Check the context is a global context.
Steve Block44f0eee2011-05-26 01:26:41 +01002337 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002338 // Preserve original value of holder_reg.
2339 push(holder_reg);
2340 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2341 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2342 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2343
    // Read the first word and compare it to global_context_map().
2345 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2346 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2347 Check(equal, "JSGlobalObject::global_context should be a global context.");
2348 pop(holder_reg);
2349 }
2350
2351 movq(kScratchRegister,
2352 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00002353 int token_offset =
2354 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002355 movq(scratch, FieldOperand(scratch, token_offset));
2356 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2357 j(not_equal, miss);
2358
2359 bind(&same_contexts);
2360}
2361
2362
2363void MacroAssembler::LoadAllocationTopHelper(Register result,
Steve Blocka7e24c12009-10-30 11:49:00 +00002364 Register scratch,
2365 AllocationFlags flags) {
2366 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01002367 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002368
2369 // Just return if allocation top is already known.
2370 if ((flags & RESULT_CONTAINS_TOP) != 0) {
2371 // No use of scratch if allocation top is provided.
Steve Block6ded16b2010-05-10 14:33:55 +01002372 ASSERT(!scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00002373#ifdef DEBUG
2374 // Assert that result actually contains top on entry.
Steve Block44f0eee2011-05-26 01:26:41 +01002375 Operand top_operand = ExternalOperand(new_space_allocation_top);
2376 cmpq(result, top_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002377 Check(equal, "Unexpected allocation top");
2378#endif
2379 return;
2380 }
2381
Steve Block6ded16b2010-05-10 14:33:55 +01002382 // Move address of new object to result. Use scratch register if available,
2383 // and keep address in scratch until call to UpdateAllocationTopHelper.
2384 if (scratch.is_valid()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002385 LoadAddress(scratch, new_space_allocation_top);
Steve Blocka7e24c12009-10-30 11:49:00 +00002386 movq(result, Operand(scratch, 0));
Steve Block6ded16b2010-05-10 14:33:55 +01002387 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002388 Load(result, new_space_allocation_top);
Steve Blocka7e24c12009-10-30 11:49:00 +00002389 }
2390}
2391
2392
2393void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2394 Register scratch) {
Steve Block44f0eee2011-05-26 01:26:41 +01002395 if (emit_debug_code()) {
Steve Blockd0582a62009-12-15 09:54:21 +00002396 testq(result_end, Immediate(kObjectAlignmentMask));
2397 Check(zero, "Unaligned allocation in new space");
2398 }
2399
Steve Blocka7e24c12009-10-30 11:49:00 +00002400 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01002401 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002402
2403 // Update new top.
Steve Block44f0eee2011-05-26 01:26:41 +01002404 if (scratch.is_valid()) {
2405 // Scratch already contains address of allocation top.
2406 movq(Operand(scratch, 0), result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00002407 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002408 Store(new_space_allocation_top, result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00002409 }
2410}
2411
2412
2413void MacroAssembler::AllocateInNewSpace(int object_size,
2414 Register result,
2415 Register result_end,
2416 Register scratch,
2417 Label* gc_required,
2418 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07002419 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01002420 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07002421 // Trash the registers to simulate an allocation failure.
2422 movl(result, Immediate(0x7091));
2423 if (result_end.is_valid()) {
2424 movl(result_end, Immediate(0x7191));
2425 }
2426 if (scratch.is_valid()) {
2427 movl(scratch, Immediate(0x7291));
2428 }
2429 }
2430 jmp(gc_required);
2431 return;
2432 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002433 ASSERT(!result.is(result_end));
2434
2435 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002436 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00002437
2438 // Calculate new top and bail out if new space is exhausted.
2439 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01002440 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Block6ded16b2010-05-10 14:33:55 +01002441
2442 Register top_reg = result_end.is_valid() ? result_end : result;
2443
Steve Block1e0659c2011-05-24 12:43:12 +01002444 if (!top_reg.is(result)) {
2445 movq(top_reg, result);
Steve Block6ded16b2010-05-10 14:33:55 +01002446 }
Steve Block1e0659c2011-05-24 12:43:12 +01002447 addq(top_reg, Immediate(object_size));
2448 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01002449 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2450 cmpq(top_reg, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002451 j(above, gc_required);
2452
2453 // Update allocation top.
Steve Block6ded16b2010-05-10 14:33:55 +01002454 UpdateAllocationTopHelper(top_reg, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00002455
Steve Block6ded16b2010-05-10 14:33:55 +01002456 if (top_reg.is(result)) {
2457 if ((flags & TAG_OBJECT) != 0) {
2458 subq(result, Immediate(object_size - kHeapObjectTag));
2459 } else {
2460 subq(result, Immediate(object_size));
2461 }
2462 } else if ((flags & TAG_OBJECT) != 0) {
2463 // Tag the result if requested.
Steve Blocka7e24c12009-10-30 11:49:00 +00002464 addq(result, Immediate(kHeapObjectTag));
2465 }
2466}
2467
2468
2469void MacroAssembler::AllocateInNewSpace(int header_size,
2470 ScaleFactor element_size,
2471 Register element_count,
2472 Register result,
2473 Register result_end,
2474 Register scratch,
2475 Label* gc_required,
2476 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07002477 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01002478 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07002479 // Trash the registers to simulate an allocation failure.
2480 movl(result, Immediate(0x7091));
2481 movl(result_end, Immediate(0x7191));
2482 if (scratch.is_valid()) {
2483 movl(scratch, Immediate(0x7291));
2484 }
2485 // Register element_count is not modified by the function.
2486 }
2487 jmp(gc_required);
2488 return;
2489 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002490 ASSERT(!result.is(result_end));
2491
2492 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002493 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00002494
2495 // Calculate new top and bail out if new space is exhausted.
2496 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01002497 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Block1e0659c2011-05-24 12:43:12 +01002498
2499 // We assume that element_count*element_size + header_size does not
2500 // overflow.
2501 lea(result_end, Operand(element_count, element_size, header_size));
2502 addq(result_end, result);
2503 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01002504 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2505 cmpq(result_end, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002506 j(above, gc_required);
2507
2508 // Update allocation top.
2509 UpdateAllocationTopHelper(result_end, scratch);
2510
2511 // Tag the result if requested.
2512 if ((flags & TAG_OBJECT) != 0) {
2513 addq(result, Immediate(kHeapObjectTag));
2514 }
2515}
2516
2517
2518void MacroAssembler::AllocateInNewSpace(Register object_size,
2519 Register result,
2520 Register result_end,
2521 Register scratch,
2522 Label* gc_required,
2523 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07002524 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01002525 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07002526 // Trash the registers to simulate an allocation failure.
2527 movl(result, Immediate(0x7091));
2528 movl(result_end, Immediate(0x7191));
2529 if (scratch.is_valid()) {
2530 movl(scratch, Immediate(0x7291));
2531 }
2532 // object_size is left unchanged by this function.
2533 }
2534 jmp(gc_required);
2535 return;
2536 }
2537 ASSERT(!result.is(result_end));
2538
Steve Blocka7e24c12009-10-30 11:49:00 +00002539 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002540 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00002541
2542 // Calculate new top and bail out if new space is exhausted.
2543 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01002544 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002545 if (!object_size.is(result_end)) {
2546 movq(result_end, object_size);
2547 }
2548 addq(result_end, result);
Steve Block1e0659c2011-05-24 12:43:12 +01002549 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01002550 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2551 cmpq(result_end, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002552 j(above, gc_required);
2553
2554 // Update allocation top.
2555 UpdateAllocationTopHelper(result_end, scratch);
2556
2557 // Tag the result if requested.
2558 if ((flags & TAG_OBJECT) != 0) {
2559 addq(result, Immediate(kHeapObjectTag));
2560 }
2561}
2562
2563
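// Resetting the allocation top to the object's address frees the object and
// anything allocated after it in new space, so the object passed must be the
// most recent allocation still needed to be undone.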
2564void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2565 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01002566 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002567
2568 // Make sure the object has no tag before resetting top.
2569 and_(object, Immediate(~kHeapObjectTagMask));
Steve Block44f0eee2011-05-26 01:26:41 +01002570 Operand top_operand = ExternalOperand(new_space_allocation_top);
Steve Blocka7e24c12009-10-30 11:49:00 +00002571#ifdef DEBUG
Steve Block44f0eee2011-05-26 01:26:41 +01002572 cmpq(object, top_operand);
  Check(below, "Undo allocation of non-allocated memory");
2574#endif
Steve Block44f0eee2011-05-26 01:26:41 +01002575 movq(top_operand, object);
Steve Blocka7e24c12009-10-30 11:49:00 +00002576}
2577
2578
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask + kHeaderAlignment.
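  // Together with the masking and subtraction below, this rounds
  // kHeaderSize + length * 2 up to the next multiple of kObjectAlignment:
  // only the misaligned part of the header (kHeaderAlignment) is folded into
  // the rounding here, because the aligned part is passed separately as
  // header_size to AllocateInNewSpace.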
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                                                 kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ascii cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. Destination is incremented by length; source,
// length and scratch are clobbered.
// A simpler loop is faster on small copies, but slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag,
// before calling this function.
void MacroAssembler::CopyBytes(Register destination,
                               Register source,
                               Register length,
                               int min_length,
                               Register scratch) {
  ASSERT(min_length >= 0);
  if (FLAG_debug_code) {
    cmpl(length, Immediate(min_length));
    Assert(greater_equal, "Invalid min_length");
  }
  Label loop, done, short_string, short_loop;

  const int kLongStringLimit = 20;
  if (min_length <= kLongStringLimit) {
    cmpl(length, Immediate(kLongStringLimit));
    j(less_equal, &short_string);
  }

  ASSERT(source.is(rsi));
  ASSERT(destination.is(rdi));
  ASSERT(length.is(rcx));

  // Because source is 8-byte aligned in our uses of this function,
  // we keep source aligned for the rep movs operation by copying the odd bytes
  // at the end of the ranges.
  movq(scratch, length);
  shrl(length, Immediate(3));
  repmovsq();
  // Move remaining bytes of length.
  andl(scratch, Immediate(0x7));
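  // The last eight bytes of the original range are copied with a single
  // quadword move. This may overlap bytes already copied by rep movsq, which
  // is harmless, and reading source[scratch - 8] is safe because length is
  // known to exceed kLongStringLimit on this path.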
  movq(length, Operand(source, scratch, times_1, -8));
  movq(Operand(destination, scratch, times_1, -8), length);
  addq(destination, scratch);

  if (min_length <= kLongStringLimit) {
    jmp(&done);

    bind(&short_string);
    if (min_length == 0) {
      testl(length, length);
      j(zero, &done);
    }
    lea(scratch, Operand(destination, length, times_1, 0));

    bind(&short_loop);
    movb(length, Operand(source, 0));
    movb(Operand(destination, 0), length);
    incq(source);
    incq(destination);
    cmpq(destination, scratch);
    j(not_equal, &short_loop);

    bind(&done);
  }
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in rsi).
    movq(dst, rsi);
  }

  // We should not have found a 'with' context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
    Check(equal, "Yo dawg, I heard you liked function contexts "
                 "so I put function contexts in all your contexts");
  }
}

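// Number of arguments to a C function that are passed in registers: the
// Windows x64 calling convention uses four (rcx, rdx, r8, r9), while the
// AMD64 ABI used on Linux and Mac uses six (rdi, rsi, rdx, rcx, r8, r9)
// for integer and pointer arguments.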
#ifdef _WIN64
static const int kRegisterPassedArguments = 4;
#else
static const int kRegisterPassedArguments = 6;
#endif

void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  movq(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, FACTORY->meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64 stack slots are reserved by the caller for all arguments
  // including the ones passed in registers, and space is always allocated for
  // the four register arguments even if the function takes fewer than four
  // arguments.
  // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
  // and the caller does not reserve stack slots for them.
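  // For example: with three arguments, Windows still reserves four slots;
  // with eight arguments, the AMD64 ABI needs only 8 - 6 = 2 stack slots.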
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  const int kMinimumStackSlots = kRegisterPassedArguments;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}


void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
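  // The old rsp is stored in the slot just above the argument slots, so that
  // CallCFunction can restore it after the alignment-dependent amount of
  // padding introduced below.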
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
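  // Restore the stack pointer saved by PrepareCallCFunction.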
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}


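// CodePatcher overwrites size bytes of code at address in place. A minimal
// usage sketch (illustrative; assumes the masm() accessor declared in the
// corresponding header):
//   CodePatcher patcher(address, 1);
//   patcher.masm()->int3();  // Replace one byte with a breakpoint.
// The destructor flushes the instruction cache and verifies that exactly
// size bytes were emitted.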
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64