// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate->heap()->roots_address());
  intptr_t delta = other.address() - roots_register_value;
  return delta;
}


Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(target, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  movq(scratch, target);
  return Operand(scratch, 0);
}
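
// Illustrative sketch (not part of the original file): when the delta from
// kRootRegister fits in 32 bits, ExternalOperand yields a single
// root-register-relative operand and leaves `scratch` untouched; otherwise
// the address is materialized in `scratch` first. A hypothetical use, where
// `some_external_reference` stands in for any ExternalReference:
//
//   // One memory operand either way; at most one extra movq.
//   movq(rax, ExternalOperand(some_external_reference, kScratchRegister));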


void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  } else {
    movq(kScratchRegister, source);
    movq(destination, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(destination, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  } else {
    movq(kScratchRegister, destination);
    movq(Operand(kScratchRegister, 0), source);
  }
}


void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  movq(destination, source);
}


int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      // Operand is lea(scratch, Operand(kRootRegister, delta));
      // Opcodes : REX.W 8D ModRM Disp8/Disp32  - 4 or 7.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movq(destination, src);
  return 10;
}
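
// Worked size accounting (illustrative, standard x64 encodings): the fast
// path emits `lea reg, [kRootRegister + delta]`, i.e. REX.W + 0x8D + ModRM
// (3 bytes) plus a one- or four-byte displacement, hence the 4 or 7 above.
// The fallback movq with a 64-bit immediate is REX.W + (0xB8 | reg) plus
// eight immediate bytes, hence 10.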


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  ASSERT(root_array_available_);
  movq(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  cmpq(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  ASSERT(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}
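
// Addressing sketch (illustrative; the concrete bias value is an assumption
// from the header, not this file): kRootRegister points kRootRegisterBias
// bytes past the start of the root array so that more entries are reachable
// with a one-byte displacement. With an assumed bias of 128 and 8-byte
// pointers, loading root index 2 becomes
//
//   movq(destination, Operand(kRootRegister, 2 * 8 - 128));  // disp8 = -112.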


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (emit_debug_code()) {
    // Check that the object is not in new space.
    NearLabel not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr. See the
  // Page::GetRegionNumberForAddress method for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));

  // Set the dirty mark for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
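
// Worked example (illustrative; the page and region sizes are assumptions,
// not taken from this file): with 8KB-aligned pages and 256-byte regions,
// `and_(object, ~kPageAlignmentMask)` clears the low 13 bits to get the page
// start, and (addr >> 8) & (kPageAlignmentMask >> 8) yields a region number
// in [0, 31], i.e. the bit index that `bts` sets in the page's 32-bit dirty
// mask at kDirtyFlagOffset.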


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}
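
// Usage sketch (hypothetical caller, for illustration only; the register
// choices are arbitrary): after storing a tagged pointer into a heap object
// field, emit the barrier so the GC's dirty marks cover the updated region.
//
//   movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdx);  // The store.
//   RecordWrite(rbx, JSObject::kPropertiesOffset, rdx, rcx);    // The barrier.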


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (emit_debug_code()) {
    NearLabel okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    NearLabel ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  NearLabel L;
  j(cc, &L);
  Abort(msg);
  // Control does not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    NearLabel alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if the stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  NearLabel ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // Control does not return here.
  int3();
}
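
// Worked example (illustrative): if `msg` lives at the odd address 0x100001,
// then p0 = (p1 & ~kSmiTagMask) + kSmiTag = 0x100000 carries a valid smi tag,
// and the alignment difference p1 - p0 = 1 travels separately as a genuine
// smi, so the runtime can rebuild the real pointer as p0 + 1 without the GC
// ever seeing an unaligned "pointer".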


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
         RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
        RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later. However, as the new key is the numeric value of a
  // string key there is no difference in using either key.
  Integer32ToSmi(index, hash);
}
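
// Worked example (illustrative; assumes the cached array index sits in the
// hash field bits selected by kArrayIndexValueMask, just above kHashShift):
// for a string like "42" whose index is cached, the and/shr pair leaves the
// untagged integer 42 in `hash`, and Integer32ToSmi then writes
// Smi::FromInt(42) into `index`.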


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(rax, function->nargs);
  LoadAddress(rbx, ExternalReference(function, isolate()));
  CEntryStub ces(1);
  ces.SaveDoubles();
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return HEAP->undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  return TryJumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(ExternalReference(fid, isolate()),
                                      num_arguments,
                                      result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#ifdef _WIN64
  // We need to prepare a slot for the result handle on the stack and put
  // a pointer to it into the first argument register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}


MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
    ApiFunction* function, int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Factory* factory = isolate()->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax,
       reinterpret_cast<int64_t>(function->address()),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#ifdef _WIN64
  // rax keeps a pointer to v8::Handle; unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero. Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
                                           0, 1);
  if (result->IsFailure()) {
    return result;
  }

  bind(&empty_result);
  // It was zero; the result is undefined.
  Move(rax, factory->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
#ifdef _WIN64
  LoadAddress(rcx, ExternalReference::isolate_address());
#else
  LoadAddress(rdi, ExternalReference::isolate_address());
#endif
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);

  return result;
}
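
// Bookkeeping sketch (summarizing the code above): the isolate's handle
// scope data is addressed as three words relative to `next_address`:
//
//   [base_reg + kNextOffset]   next free handle slot
//   [base_reg + kLimitOffset]  end of the current handle block
//   [base_reg + kLevelOffset]  scope nesting depth
//
// The level is bumped before the API call and the next/level fields are
// restored afterwards; a changed limit means the callee allocated extra
// handle blocks, which the delete_allocated_handles path frees before
// leaving the exit frame.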


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext, int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  return TryTailCallStub(&ces);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   CallWrapper* call_wrapper) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into the target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}
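
// Encoding sketch (illustrative byte counts for a low register): Set picks
// the shortest usable form.
//
//   Set(rax, 0);                 // xorl rax, rax    - 2 bytes.
//   Set(rax, 0xFFFFFFFF);        // movl rax, imm32  - 5 bytes, zero-extends.
//   Set(rax, -1);                // movq rax, imm32  - 7 bytes, sign-extends.
//   Set(rax, int64_t(1) << 40);  // movq rax, imm64  - 10 bytes.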

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    Set(kScratchRegister, x);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      NearLabel ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}
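
// Worked example (illustrative, with kSmiShift == 32): kSmiConstantRegister
// permanently holds Smi::FromInt(1), i.e. 1 << 32, so small smi constants
// come from a scaled lea instead of a 10-byte movq. For Smi::FromInt(5):
//
//   lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
//   // dst = (1 << 32) + 4 * (1 << 32) = 5 << 32 = Smi::FromInt(5).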


void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}
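
// Representation note (x64, kSmiShift == 32): a smi keeps its 32-bit payload
// in the upper half of the word, so tagging is a single shift:
//
//   Integer32ToSmi: dst = src << 32, e.g. 7 -> 0x0000000700000000,
//
// which leaves the low 32 bits, including tag bit 0, all zero.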


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    NearLabel ok;
    j(zero, &ok);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  if (emit_debug_code()) {
    AbortIfNotSmi(smi1);
    AbortIfNotSmi(smi2);
  }
  cmpq(smi1, smi2);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  Cmp(dst, src);
}


void MacroAssembler::Cmp(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  ASSERT(!dst.AddressUsesRegister(smi_reg));
  cmpq(dst, smi_reg);
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckSmi(const Operand& src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}
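
// Bit-trick sketch (illustrative): rotating left by one moves the sign bit
// (bit 63) into bit 0 and the smi tag bit (bit 0) into bit 1, so the
// `testb ..., Immediate(3)` above is zero exactly when the value was both a
// smi (tag bit clear) and non-negative (sign bit clear).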


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}


void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}
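
// Worked example (illustrative, with kSmiShift == 32): adding Smi::FromInt(2)
// in place reuses the constant register as a scaled index,
//
//   lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
//   // dst = src + 2 * (1 << 32) = src + Smi::FromInt(2),
//
// avoiding both a scratch register and a 10-byte immediate load.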


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
      movq(kScratchRegister, src1);
      addq(kScratchRegister, src2);
      Check(no_overflow, "Smi addition overflow");
    }
    lea(dst, Operand(src1, src2, times_1, 0));
  } else {
    addq(dst, src2);
    Assert(no_overflow, "Smi addition overflow");
  }
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}
1302
1303
1304void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
Steve Block3ce2e202009-11-05 08:53:23 +00001305 ASSERT(!dst.is(src2));
Steve Blocka7e24c12009-10-30 11:49:00 +00001306 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001307 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001308 }
1309 and_(dst, src2);
1310}
1311
1312
Steve Block3ce2e202009-11-05 08:53:23 +00001313void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
1314 if (constant->value() == 0) {
Steve Block9fac8402011-05-12 15:51:54 +01001315 Set(dst, 0);
Steve Block3ce2e202009-11-05 08:53:23 +00001316 } else if (dst.is(src)) {
1317 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001318 Register constant_reg = GetSmiConstant(constant);
1319 and_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001320 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001321 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001322 and_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001323 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001324}
1325
1326
1327void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1328 if (!dst.is(src1)) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01001329 ASSERT(!src1.is(src2));
Steve Block3ce2e202009-11-05 08:53:23 +00001330 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001331 }
1332 or_(dst, src2);
1333}
1334
1335
Steve Block3ce2e202009-11-05 08:53:23 +00001336void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1337 if (dst.is(src)) {
1338 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001339 Register constant_reg = GetSmiConstant(constant);
1340 or_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001341 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001342 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001343 or_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001344 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001345}
1346
Steve Block3ce2e202009-11-05 08:53:23 +00001347
Steve Blocka7e24c12009-10-30 11:49:00 +00001348void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1349 if (!dst.is(src1)) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01001350 ASSERT(!src1.is(src2));
Steve Block3ce2e202009-11-05 08:53:23 +00001351 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001352 }
1353 xor_(dst, src2);
1354}
1355
1356
Steve Block3ce2e202009-11-05 08:53:23 +00001357void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1358 if (dst.is(src)) {
1359 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001360 Register constant_reg = GetSmiConstant(constant);
1361 xor_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001362 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001363 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001364 xor_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001365 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001366}
1367
1368
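// Shifting a smi right by a constant: with the value in the upper 32 bits,
// a single sar by (shift_value + kSmiShift) untags and shifts in one
// instruction, and the shl by kSmiShift re-tags. For example (a sketch,
// assuming kSmiShift == 32): the smi -8 is 0xFFFFFFF800000000; sar by 33
// yields -4, and shl by 32 produces the smi -4.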
void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  // Untag shift amount.
  SmiToInteger32(rcx, src2);
  // The shift amount is specified by the lower five bits, not six as for
  // the shl opcode itself.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


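// The variable-shift version below folds untagging and shifting into one
// sar: or'ing the untagged shift amount with kSmiShift (32) yields
// 32 + (amount & 0x1f) once the hardware masks cl to six bits, so a single
// sar_cl both untags dst and performs the arithmetic shift; the final shl
// re-tags. rcx is needed for the cl shift count, so if either source lives
// in rcx it is parked in kScratchRegister and restored afterwards.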
void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


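// SmiToIndex turns a smi into an untagged integer pre-scaled by 2^shift,
// ready to be used in an Operand with scale times_1. With the value at bit
// kSmiShift, one arithmetic right shift by (kSmiShift - shift) produces
// value << shift directly; e.g. (a sketch, with kSmiShift == 32 and
// shift == 3 for pointer-size scaling) the smi 5 becomes the byte offset 40.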
SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}

SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
  // Add the 32-bit smi value stored in the upper half of the field to dst.
  ASSERT_EQ(0, kSmiShift % kBitsPerByte);
  addl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Test(const Operand& src, Smi* source) {
  // The 32-bit smi value sits in the upper half of the word, kIntSize bytes
  // above the operand's base address.
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}


int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
  const int kCallInstructionSize = 3;
  return LoadAddressSize(ext) + kCallInstructionSize;
}


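// The DEBUG scaffolding in the Call variants below checks that each call
// sequence occupies exactly the number of bytes the corresponding CallSize
// function predicts, so callers that pre-compute call sizes (for example
// the CallWrapper hooks used by InvokeCode) cannot silently drift out of
// sync with the emitted code.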
void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(destination, rmode);
#endif
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


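// Pushad saves the general purpose registers that can hold live values
// across a safepoint. rsp and rbp are recoverable from the frame itself,
// and r10, r12 and r13 are reserved as kScratchRegister,
// kSmiConstantRegister and kRootRegister, so only the remaining eleven
// registers are stored; extra stack space pads the area to a fixed
// kNumSafepointRegisters slots.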
void MacroAssembler::Pushad() {
  push(rax);
  push(rcx);
  push(rdx);
  push(rbx);
  // Not pushing rsp or rbp.
  push(rsi);
  push(rdi);
  push(r8);
  push(r9);
  // r10 is kScratchRegister.
  push(r11);
  // r12 is kSmiConstantRegister.
  // r13 is kRootRegister.
  push(r14);
  push(r15);
  STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, -sp_delta));
}


void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, sp_delta));
  pop(r15);
  pop(r14);
  pop(r11);
  pop(r9);
  pop(r8);
  pop(rdi);
  pop(rsi);
  pop(rbx);
  pop(rdx);
  pop(rcx);
  pop(rax);
}


void MacroAssembler::Dropad() {
  addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}


// Order in which general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,   // rax
    1,   // rcx
    2,   // rdx
    3,   // rbx
    -1,  // rsp
    -1,  // rbp
    4,   // rsi
    5,   // rdi
    6,   // r8
    7,   // r9
    -1,  // r10
    8,   // r11
    -1,  // r12
    -1,  // r13
    9,   // r14
    10   // r15
};


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movq(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movq(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


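// A try-handler record occupies StackHandlerConstants::kSize bytes of
// stack: from top to bottom the next-handler pointer, the frame pointer,
// the state word and the return address, as the ASSERT_EQs below spell out.
// The head of the handler chain lives at the isolate's handler address, so
// pushing a record and storing rsp there links it in constant time.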
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  Operand handler_operand =
      ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
  push(handler_operand);
  // Link this handler.
  movq(handler_operand, rsp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  // Unlink this handler.
  Operand handler_operand =
      ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
  pop(handler_operand);
  // Remove the remaining fields.
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


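// Throw unwinds to the innermost handler in one step: loading rsp from the
// handler address drops everything above the handler record, after which
// the record's fields can simply be popped. The next-handler pointer is
// popped straight back into the isolate slot, restoring the chain head.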
void MacroAssembler::Throw(Register value) {
  // Check that the stack contains the next handler, the frame pointer, the
  // state and the return address, in that order.
  STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
                StackHandlerConstants::kStateOffset);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
                StackHandlerConstants::kPCOffset);
  // Keep thrown value in rax.
  if (!value.is(rax)) {
    movq(rax, value);
  }

  ExternalReference handler_address(Isolate::k_handler_address, isolate());
  Operand handler_operand = ExternalOperand(handler_address);
  movq(rsp, handler_operand);
  // Get the next handler in the chain.
  pop(handler_operand);
  pop(rbp);  // Pop the frame pointer.
  pop(rdx);  // Remove the state.

  // Before returning we restore the context from the frame pointer if not
  // NULL. The frame pointer is NULL in the exception handler of a JS entry
  // frame.
  Set(rsi, 0);  // Tentatively set context pointer to NULL.
  NearLabel skip;
  cmpq(rbp, Immediate(0));
  j(equal, &skip);
  movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  bind(&skip);
  ret(0);
}


void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
                                      Register value) {
  // Keep thrown value in rax.
  if (!value.is(rax)) {
    movq(rax, value);
  }
  // Fetch top stack handler.
  ExternalReference handler_address(Isolate::k_handler_address, isolate());
  Load(rsp, handler_address);

  // Unwind the handlers until the ENTRY handler is found.
  NearLabel loop, done;
  bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kStateOffset;
  cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
  j(equal, &done);
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kNextOffset;
  movq(rsp, Operand(rsp, kNextOffset));
  jmp(&loop);
  bind(&done);

  // Set the top handler address to the next handler past the current ENTRY
  // handler.
  Operand handler_operand = ExternalOperand(handler_address);
  pop(handler_operand);

  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(
        Isolate::k_external_caught_exception_address, isolate());
    Set(rax, static_cast<int64_t>(false));
    Store(external_caught, rax);

    // Set pending exception and rax to out of memory exception.
    ExternalReference pending_exception(Isolate::k_pending_exception_address,
                                        isolate());
    movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
    Store(pending_exception, rax);
  }

  // Clear the context pointer.
  Set(rsi, 0);

  // Restore registers from handler.
  STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
                StackHandlerConstants::kFPOffset);
  pop(rbp);  // FP
  STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
                StackHandlerConstants::kStateOffset);
  pop(rdx);  // State

  STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
                StackHandlerConstants::kPCOffset);
  ret(0);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    addq(rsp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    JumpIfSmi(obj, fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(not_equal, fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  NearLabel ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok);
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(NegateCondition(is_smi), "Operand is a smi");
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotSmi(const Operand& object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotString(Register object) {
  testb(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  movq(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}


void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  ASSERT(!src.is(kScratchRegister));
  LoadRoot(kScratchRegister, root_value_index);
  cmpq(src, kScratchRegister);
  Check(equal, message);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


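// TryGetFunctionPrototype reads a function's prototype without calling the
// runtime. The prototype_or_initial_map field holds either the prototype
// itself or, once the function has been used as a constructor, the initial
// map whose kPrototypeOffset field is the prototype; the hole value means
// no prototype has been allocated yet, so the lookup misses instead.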
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  NearLabel non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  NearLabel done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    movl(counter_operand, Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      incl(counter_operand);
    } else {
      addl(counter_operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      decl(counter_operand);
    } else {
      subl(counter_operand, Immediate(value));
    }
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  Set(rax, 0);  // No arguments.
  LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif  // ENABLE_DEBUGGER_SUPPORT


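// The InvokeCode variants funnel through InvokePrologue (defined elsewhere
// in this file), which in broad strokes compares the expected and actual
// argument counts, falling through to the call when they match and routing
// through the arguments adaptor when they do not. The optional CallWrapper
// is notified via BeforeCall/AfterCall around the actual call so callers
// can record information such as safepoints.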
void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                CallWrapper* call_wrapper) {
  NearLabel done;
  InvokePrologue(expected,
                 actual,
                 Handle<Code>::null(),
                 code,
                 &done,
                 flag,
                 call_wrapper);
  if (flag == CALL_FUNCTION) {
    if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code));
    call(code);
    if (call_wrapper != NULL) call_wrapper->AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                CallWrapper* call_wrapper) {
  NearLabel done;
  Register dummy = rax;
  InvokePrologue(expected,
                 actual,
                 code,
                 dummy,
                 &done,
                 flag,
                 call_wrapper);
  if (flag == CALL_FUNCTION) {
    if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code));
    Call(code, rmode);
    if (call_wrapper != NULL) call_wrapper->AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    CallWrapper* call_wrapper) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    CallWrapper* call_wrapper) {
  ASSERT(function->is_compiled());
  // Get the function and set up the context.
  Move(rdi, Handle<JSFunction>(function));
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  if (V8::UseCrankshaft()) {
    // Since Crankshaft can recompile a function, we need to load
    // the Code object every time we call the function.
    movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
    ParameterCount expected(function->shared()->formal_parameter_count());
    InvokeCode(rdx, expected, actual, flag, call_wrapper);
  } else {
    // Invoke the cached code.
    Handle<Code> code(function->code());
    ParameterCount expected(function->shared()->formal_parameter_count());
    InvokeCode(code,
               expected,
               actual,
               RelocInfo::CODE_TARGET,
               flag,
               call_wrapper);
  }
}


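// EnterFrame builds a standard internal frame: the saved rbp, the context
// from rsi, a smi frame-type marker and the code object, matching the
// layout that LeaveFrame checks against via
// StandardFrameConstants::kMarkerOffset in debug builds.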
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (emit_debug_code()) {
    movq(kScratchRegister,
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


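// Exit frames bridge JS and C++ code. The ASSERTs below pin down the layout
// relative to rbp: the caller SP at +2 words, the return address at +1, the
// saved rbp at 0, the patched entry-sp slot at -1, and the code object
// below that. r14 keeps a backup of rax (used to compute argv) and r15
// holds argv itself; both are callee-saved, so they survive the C call.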
void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movq(r14, rax);  // Back up rax in a callee-saved register.
  }

  Store(ExternalReference(Isolate::k_c_entry_fp_address, isolate()), rbp);
  Store(ExternalReference(Isolate::k_context_address, isolate()), rsi);
}


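// Notes on the epilogue below: on Win64 four extra pointer slots of shadow
// (home) space are reserved, since a C callee may spill its register
// arguments there. With save_doubles, space for every XMM register is
// reserved but only the allocatable ones are stored, at fixed rbp-relative
// offsets starting two words below the frame pointer, which is also where
// LeaveExitFrame reloads them from.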
void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kNumRegisters * kDoubleSize +
        arg_stack_space * kPointerSize;
    subq(rsp, Immediate(space));
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subq(rsp, Immediate(arg_stack_space * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    ASSERT(is_int8(kFrameAlignment));
    and_(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}


void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r15. It is reused in
  // LeaveExitFrame, so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }
  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Drop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r15, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(rcx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveApiExitFrame() {
  movq(rsp, rbp);
  pop(rbp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::k_context_address, isolate());
  Operand context_operand = ExternalOperand(context_address);
  movq(rsi, context_operand);
#ifdef DEBUG
  movq(context_operand, Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
                                       isolate());
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movq(c_entry_fp_operand, Immediate(0));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a global context.
  if (emit_debug_code()) {
    // Preserve original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


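// New-space allocation is a bump of a global top pointer: load the current
// top, add the object size, compare against the allocation limit, and store
// the new top back. The carry check catches address-space wrap-around on
// the add, and TAG_OBJECT asks for the result with the heap-object tag
// already applied.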
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    Operand top_operand = ExternalOperand(new_space_allocation_top);
    cmpq(result, top_operand);
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    LoadAddress(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else {
    Load(result, new_space_allocation_top);
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (emit_debug_code()) {
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Update new top.
  if (scratch.is_valid()) {
    // Scratch already contains address of allocation top.
    movq(Operand(scratch, 0), result_end);
  } else {
    Store(new_space_allocation_top, result_end);
  }
}


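// In the fixed-size variant below, result_end is optional: when it is not
// valid, result itself is bumped to the new top and the object start is
// recovered afterwards by subtracting the object size (minus the tag when
// TAG_OBJECT is set), saving a register at the cost of one extra subq.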
2419void MacroAssembler::AllocateInNewSpace(int object_size,
2420 Register result,
2421 Register result_end,
2422 Register scratch,
2423 Label* gc_required,
2424 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07002425 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01002426 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07002427 // Trash the registers to simulate an allocation failure.
2428 movl(result, Immediate(0x7091));
2429 if (result_end.is_valid()) {
2430 movl(result_end, Immediate(0x7191));
2431 }
2432 if (scratch.is_valid()) {
2433 movl(scratch, Immediate(0x7291));
2434 }
2435 }
2436 jmp(gc_required);
2437 return;
2438 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002439 ASSERT(!result.is(result_end));
2440
2441 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002442 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00002443
2444 // Calculate new top and bail out if new space is exhausted.
2445 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01002446 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Block6ded16b2010-05-10 14:33:55 +01002447
2448 Register top_reg = result_end.is_valid() ? result_end : result;
2449
Steve Block1e0659c2011-05-24 12:43:12 +01002450 if (!top_reg.is(result)) {
2451 movq(top_reg, result);
Steve Block6ded16b2010-05-10 14:33:55 +01002452 }
Steve Block1e0659c2011-05-24 12:43:12 +01002453 addq(top_reg, Immediate(object_size));
2454 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01002455 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2456 cmpq(top_reg, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002457 j(above, gc_required);
2458
2459 // Update allocation top.
Steve Block6ded16b2010-05-10 14:33:55 +01002460 UpdateAllocationTopHelper(top_reg, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00002461
Steve Block6ded16b2010-05-10 14:33:55 +01002462 if (top_reg.is(result)) {
2463 if ((flags & TAG_OBJECT) != 0) {
2464 subq(result, Immediate(object_size - kHeapObjectTag));
2465 } else {
2466 subq(result, Immediate(object_size));
2467 }
2468 } else if ((flags & TAG_OBJECT) != 0) {
2469 // Tag the result if requested.
Steve Blocka7e24c12009-10-30 11:49:00 +00002470 addq(result, Immediate(kHeapObjectTag));
2471 }
2472}
2473
2474
2475void MacroAssembler::AllocateInNewSpace(int header_size,
2476 ScaleFactor element_size,
2477 Register element_count,
2478 Register result,
2479 Register result_end,
2480 Register scratch,
2481 Label* gc_required,
2482 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07002483 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01002484 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07002485 // Trash the registers to simulate an allocation failure.
2486 movl(result, Immediate(0x7091));
2487 movl(result_end, Immediate(0x7191));
2488 if (scratch.is_valid()) {
2489 movl(scratch, Immediate(0x7291));
2490 }
2491 // Register element_count is not modified by the function.
2492 }
2493 jmp(gc_required);
2494 return;
2495 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002496 ASSERT(!result.is(result_end));
2497
2498 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002499 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00002500
2501 // Calculate new top and bail out if new space is exhausted.
2502 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01002503 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Block1e0659c2011-05-24 12:43:12 +01002504
2505 // We assume that element_count*element_size + header_size does not
2506 // overflow.
2507 lea(result_end, Operand(element_count, element_size, header_size));
2508 addq(result_end, result);
2509 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01002510 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2511 cmpq(result_end, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002512 j(above, gc_required);
2513
2514 // Update allocation top.
2515 UpdateAllocationTopHelper(result_end, scratch);
2516
2517 // Tag the result if requested.
2518 if ((flags & TAG_OBJECT) != 0) {
2519 addq(result, Immediate(kHeapObjectTag));
2520 }
2521}
2522
2523
2524void MacroAssembler::AllocateInNewSpace(Register object_size,
2525 Register result,
2526 Register result_end,
2527 Register scratch,
2528 Label* gc_required,
2529 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07002530 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01002531 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07002532 // Trash the registers to simulate an allocation failure.
2533 movl(result, Immediate(0x7091));
2534 movl(result_end, Immediate(0x7191));
2535 if (scratch.is_valid()) {
2536 movl(scratch, Immediate(0x7291));
2537 }
2538 // object_size is left unchanged by this function.
2539 }
2540 jmp(gc_required);
2541 return;
2542 }
2543 ASSERT(!result.is(result_end));
2544
Steve Blocka7e24c12009-10-30 11:49:00 +00002545 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002546 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00002547
2548 // Calculate new top and bail out if new space is exhausted.
2549 ExternalReference new_space_allocation_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01002550 ExternalReference::new_space_allocation_limit_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002551 if (!object_size.is(result_end)) {
2552 movq(result_end, object_size);
2553 }
2554 addq(result_end, result);
Steve Block1e0659c2011-05-24 12:43:12 +01002555 j(carry, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01002556 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2557 cmpq(result_end, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00002558 j(above, gc_required);
2559
2560 // Update allocation top.
2561 UpdateAllocationTopHelper(result_end, scratch);
2562
2563 // Tag the result if requested.
2564 if ((flags & TAG_OBJECT) != 0) {
2565 addq(result, Immediate(kHeapObjectTag));
2566 }
2567}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
  cmpq(object, top_operand);
  Check(below, "Undo allocation of non allocated memory");
#endif
  movq(top_operand, object);
}
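// Illustrative sketch (not part of the original source): because new-space
// allocation just bumps the top pointer, the most recent allocation can be
// abandoned by moving top back to the object's start. This only works if no
// other allocation happened in between; registers and label are hypothetical.
//
//   AllocateInNewSpace(HeapNumber::kSize, rax, rbx, no_reg, &gc, TAG_OBJECT);
//   ...                              // a later check fails
//   UndoAllocationInNewSpace(rax);   // strips the tag, resets allocation top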


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask + kHeaderAlignment.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
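// Worked example for the size computation above (added for clarity, not in
// the original source). Writing H for SeqTwoByteString::kHeaderSize and
// h = H & kObjectAlignmentMask, the three instructions compute
//   scratch1 = ((length * 2 + kObjectAlignmentMask + h)
//               & ~kObjectAlignmentMask) - h
// so that H + scratch1 == align(H + length * 2): the total object size is
// rounded up to the 8-byte object alignment. For instance, with a
// hypothetical H == 20 (so h == 4) and length == 3:
//   lea:  scratch1 = 6 + 7 + 4 = 17
//   and:  17 & ~7 = 16
//   subq: 16 - 4 = 12, and H + 12 == 32 == align(26).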


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ascii cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. Destination is incremented by length; source,
// length and scratch are clobbered.
// A simpler loop is faster on small copies, but slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag,
// before calling this function.
void MacroAssembler::CopyBytes(Register destination,
                               Register source,
                               Register length,
                               int min_length,
                               Register scratch) {
  ASSERT(min_length >= 0);
  if (FLAG_debug_code) {
    cmpl(length, Immediate(min_length));
    Assert(greater_equal, "Invalid min_length");
  }
  Label loop, done, short_string, short_loop;

  const int kLongStringLimit = 20;
  if (min_length <= kLongStringLimit) {
    cmpl(length, Immediate(kLongStringLimit));
    j(less_equal, &short_string);
  }

  ASSERT(source.is(rsi));
  ASSERT(destination.is(rdi));
  ASSERT(length.is(rcx));

  // Because source is 8-byte aligned in our uses of this function,
  // we keep source aligned for the rep movs operation by copying the odd bytes
  // at the end of the ranges.
  movq(scratch, length);
  shrl(length, Immediate(3));
  repmovsq();
  // Move remaining bytes of length.
  andl(scratch, Immediate(0x7));
  movq(length, Operand(source, scratch, times_1, -8));
  movq(Operand(destination, scratch, times_1, -8), length);
  addq(destination, scratch);

  if (min_length <= kLongStringLimit) {
    jmp(&done);

    bind(&short_string);
    if (min_length == 0) {
      testl(length, length);
      j(zero, &done);
    }
    lea(scratch, Operand(destination, length, times_1, 0));

    bind(&short_loop);
    movb(length, Operand(source, 0));
    movb(Operand(destination, 0), length);
    incq(source);
    incq(destination);
    cmpq(destination, scratch);
    j(not_equal, &short_loop);

    bind(&done);
  }
}
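// Illustrative sketch (not part of the original source): CopyBytes hard-wires
// the rep-movs registers, so callers must stage the operands in rsi/rdi/rcx
// and emit cld() first. A hypothetical call site copying a byte range:
//
//   // rsi = source (8-byte aligned), rdi = destination, rcx = byte count.
//   cld();                             // rep movs must copy forward
//   CopyBytes(rdi, rsi, rcx, 1, rbx);  // min_length 1, rbx as scratch
//   // rdi now points one past the copied range; rsi, rcx, rbx are clobbered.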


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in rsi).
    movq(dst, rsi);
  }

  // We should not have found a 'with' context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
    Check(equal, "Yo dawg, I heard you liked function contexts "
                 "so I put function contexts in all your contexts");
  }
}
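// Illustrative sketch (not part of the original source): a chain length of 2
// walks from the current context in rsi up two static levels and leaves the
// enclosing function context in dst, so a subsequent slot load reads from the
// frame that defines the variable. Register and slot_index are hypothetical.
//
//   LoadContext(rdx, 2);  // rdx = function context two static levels up
//   movq(rax, Operand(rdx, Context::SlotOffset(slot_index)));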

#ifdef _WIN64
static const int kRegisterPassedArguments = 4;
#else
static const int kRegisterPassedArguments = 6;
#endif

void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  movq(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
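// Illustrative sketch (not part of the original source): the two helpers
// above are typically used together when generated code needs a well-known
// constructor and the map of the objects it produces. The context index and
// registers here are hypothetical.
//
//   LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);  // rdi = Array
//   LoadGlobalFunctionInitialMap(rdi, rbx);  // rbx = map of 'new Array()'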


int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64, stack slots are reserved by the caller for all arguments
  // including the ones passed in registers, and space is always allocated for
  // the four register arguments even if the function takes fewer than four
  // arguments.
  // On the AMD64 ABI (Linux/Mac), the first six arguments are passed in
  // registers and the caller does not reserve stack slots for them.
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  const int kMinimumStackSlots = kRegisterPassedArguments;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}
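// Worked examples for the slot computation above (added for clarity, not in
// the original source):
//   num_arguments == 3: Windows reserves the full four register slots (4);
//                       the AMD64 ABI passes all three in registers (0).
//   num_arguments == 8: Windows reserves one slot per argument (8);
//                       the AMD64 ABI spills the two beyond the sixth (2).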


void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}
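// Illustrative sketch (not part of the original source): a complete C call
// uses PrepareCallCFunction and CallCFunction as a pair, with the arguments
// moved into the platform's C argument registers in between. The external
// reference name and argument sources below are hypothetical.
//
//   PrepareCallCFunction(2);              // align rsp, reserve stack slots
//   #ifdef _WIN64
//   movq(rcx, arg0);  movq(rdx, arg1);    // Windows x64 argument registers
//   #else
//   movq(rdi, arg0);  movq(rsi, arg1);    // AMD64 ABI argument registers
//   #endif
//   CallCFunction(ExternalReference::some_c_function(isolate()), 2);
//   // CallCFunction restores rsp from the slot PrepareCallCFunction saved.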


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
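// Illustrative sketch (not part of the original source): CodePatcher is used
// RAII-style; instructions are emitted through its embedded assembler, and
// the destructor flushes the instruction cache and checks that exactly the
// requested number of bytes was written. Address and instructions here are
// hypothetical.
//
//   {
//     CodePatcher patcher(address, 2);  // patch exactly 2 bytes at address
//     patcher.masm()->int3();
//     patcher.masm()->nop();
//   }  // ~CodePatcher flushes the icache and verifies the patch size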

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64