// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    NearLabel not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr; see
  // Page::GetRegionNumberForAddress for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));
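  // Illustrative arithmetic (assuming this version's 8K pages with 32
  // region dirty marks, i.e. kRegionSizeLog2 == 8): for an addr at page
  // offset 0x0a04, the shrl leaves 0x0a plus the page's high address bits,
  // and the andl masks those high bits away, leaving region number 10.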

  // Set the dirty mark for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (FLAG_debug_code) {
    NearLabel okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner
    // as KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
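  // In the array case dst now holds the raw slot address
  // object + index * kPointerSize + FixedArray::kHeaderSize - kHeapObjectTag;
  // FieldOperand subtracts the heap-object tag for us.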
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    NearLabel ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  NearLabel L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    NearLabel alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
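
// Note: on common x64 ABIs OS::ActivationFrameAlignment() is 16 bytes, so
// the testq above effectively checks that the low four bits of rsp are zero
// at the point of a call into C code.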


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  NearLabel ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
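  // For example, if msg sits at address 0x7ff41235, then p0 is 0x7ff41234:
  // bit 0 is cleared, so p0 carries a valid smi tag, and the difference
  // p1 - p0 == 1 is passed alongside as a genuine smi.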
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
         RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
        RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
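  // hash now holds the untagged array index: the and_ kept only the cached
  // index bits of the hash field, and the shr dropped the hash flag bits
  // stored below them.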
  // Here we actually clobber the key register, which will be used again if
  // we call into the runtime later. However, as the new key is the numeric
  // value of the original string key, there is no difference in using
  // either key.
  Integer32ToSmi(index, hash);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  return TryJumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(ExternalReference(fid),
                                      num_arguments,
                                      result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}
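
// The offsets computed with Offset() let the API-call code below reach
// several ExternalReferences from one base register; see the
// Operand(base_reg, kLimitOffset) uses in TryCallApiFunctionAndReturn.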


void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#ifdef _WIN64
  // We need to prepare a slot for the result handle on the stack and put a
  // pointer to it into the first argument register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}


MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
    ApiFunction* function, int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address();

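  // HandleScope bookkeeping: 'next' points at the next free handle slot,
  // 'limit' at the end of the current handle block, and 'level' counts the
  // open scopes. The code below saves next and limit, bumps level around
  // the call, and restores all three afterwards.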
  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r12;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax,
       reinterpret_cast<int64_t>(function->address()),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#ifdef _WIN64
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero. Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), Factory::the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
                                           0, 1);
  if (result->IsFailure()) {
    return result;
  }

  bind(&empty_result);
  // It was zero; the result is undefined.
  Move(rax, Factory::undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
  movq(rax, ExternalReference::delete_handle_scope_extensions());
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);

  return result;
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext, int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  return TryTailCallStub(&ces);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into the target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}
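
// Encoding note: the branches above pick the shortest materialization.
// For example, Set(rax, -1) fits the sign-extended 32-bit immediate form,
// Set(rax, 0x80000000) fits the zero-extending movl, and only values that
// need more than 32 bits take the full 10-byte movq.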

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}


// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.
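//
// On x64 a smi keeps its 32-bit payload in the upper half of the word
// (kSmiTag == 0, kSmiShift == 32): Smi::FromInt(5) is 0x0000000500000000,
// and the low 32 bits, including the tag bit, are all zero.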

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}


void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (FLAG_debug_code) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      NearLabel ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  if (source->value() == 0) {
    xorl(dst, dst);
    return;
  }
  int value = source->value();
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
743 neg(dst);
744 }
745}
746
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100747
Steve Blocka7e24c12009-10-30 11:49:00 +0000748void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000749 ASSERT_EQ(0, kSmiTag);
Steve Block3ce2e202009-11-05 08:53:23 +0000750 if (!dst.is(src)) {
751 movl(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000752 }
Steve Block3ce2e202009-11-05 08:53:23 +0000753 shl(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +0000754}
755
756
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100757void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
758 if (FLAG_debug_code) {
759 testb(dst, Immediate(0x01));
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100760 NearLabel ok;
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100761 j(zero, &ok);
762 if (allow_stub_calls()) {
763 Abort("Integer32ToSmiField writing to non-smi location");
764 } else {
765 int3();
766 }
767 bind(&ok);
768 }
769 ASSERT(kSmiShift % kBitsPerByte == 0);
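  // Store src into the high 32 bits (byte offset kSmiShift / kBitsPerByte,
  // i.e. 4): the location already holds a smi, so its low 32 bits are zero
  // and the stored value becomes Smi::FromInt(src).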
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
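  // Comparing only the high 32 bits is enough: that is where a smi keeps
  // its payload, and the immediate below is the untagged value.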
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
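  // A smi is value << kSmiShift, so shifting right by kSmiShift - power
  // (or left by power - kSmiShift) yields value << power; e.g. power == 3
  // computes value * 8 with a single sar by 29.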
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Make mask 0x8000000000000001 and test that both bits are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
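  // After the rotate, bit 0 holds the old sign bit and bit 1 the old smi
  // tag bit, so testing the low two bits checks both conditions at once.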
  testb(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
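  // The low 32 bits of a smi are all zero, so the 32-bit sum of two smis
  // has clear low bits, while a heap object in either operand contributes
  // tag 01 and makes the test non-zero.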
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result; it only
      // differs in the overflow flag, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    addq(dst, src2);
  } else {
    movq(dst, src1);
    addq(dst, src2);
  }
  Assert(no_overflow, "Smi addition overflow");
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
  }
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (dst.is(src1)) {
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
  }
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  NearLabel result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // The shift amount is taken from the lower 5 bits, not the six bits the
  // 64-bit shl opcode would use.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}


Steve Blocka7e24c12009-10-30 11:49:00 +00001285void MacroAssembler::Move(Register dst, Handle<Object> source) {
1286 ASSERT(!source->IsFailure());
1287 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001288 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001289 } else {
1290 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1291 }
1292}
1293
1294
1295void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001296 ASSERT(!source->IsFailure());
Steve Blocka7e24c12009-10-30 11:49:00 +00001297 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001298 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001299 } else {
1300 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1301 movq(dst, kScratchRegister);
1302 }
1303}
1304
1305
1306void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001307 if (source->IsSmi()) {
1308 SmiCompare(dst, Smi::cast(*source));
1309 } else {
1310 Move(kScratchRegister, source);
1311 cmpq(dst, kScratchRegister);
1312 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001313}
1314
1315
1316void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1317 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001318 SmiCompare(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001319 } else {
1320 ASSERT(source->IsHeapObject());
1321 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1322 cmpq(dst, kScratchRegister);
1323 }
1324}
1325
1326
1327void MacroAssembler::Push(Handle<Object> source) {
1328 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001329 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001330 } else {
1331 ASSERT(source->IsHeapObject());
1332 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1333 push(kScratchRegister);
1334 }
1335}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}
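
// Note on the fast path above: with the x64 smi encoding assumed throughout
// this file (payload in the upper 32 bits, kSmiShift == 32), only the smi
// value 0 fits in a sign-extended 32-bit immediate, so in practice every
// nonzero smi is materialized through GetSmiConstant and pushed from a
// register.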


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


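// Test below operates on just the 32-bit smi payload: on little-endian x64
// the payload occupies the upper half of the word, so offsetting the operand
// by kIntSize (4 bytes) addresses exactly those bits and a 32-bit testl
// against the untagged value suffices.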
void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}


void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}


void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode);
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}
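
// Handler frame produced above, from higher to lower addresses (this follows
// directly from the StackHandlerConstants asserts at the top of the
// function):
//   [rsp + 3 * kPointerSize] : return address (pc)
//   [rsp + 2 * kPointerSize] : state (TRY_CATCH, TRY_FINALLY or ENTRY)
//   [rsp + 1 * kPointerSize] : saved frame pointer (NULL for JS entry)
//   [rsp + 0]                : next handler in the chain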


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  // Unlink this handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  pop(Operand(kScratchRegister, 0));
  // Remove the remaining fields.
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}
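
// FCmp leaves the FPU stack empty: fucomip compares st(0) against st(1),
// sets the integer condition flags directly, and pops st(0); the fstp(0)
// then discards the remaining operand. Callers can branch on the ordinary
// condition codes afterwards, e.g. j(parity_even, ...) to catch NaN
// operands, without touching the x87 stack again.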


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    JumpIfSmi(obj, fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(not_equal, fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  NearLabel ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok);
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand is not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(NegateCondition(is_smi), "Operand is a smi");
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  ASSERT(!src.is(kScratchRegister));
  LoadRoot(kScratchRegister, root_value_index);
  cmpq(src, kScratchRegister);
  Check(equal, message);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
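
// A hypothetical call site (illustrative only; the registers and the label
// are placeholders chosen here, not fixed by the function):
//   Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
//   masm->j(NegateCondition(is_string), &not_a_string);
// On fall-through rbx holds the map and rcx the instance type, both of which
// the caller may reuse.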


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the function isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  NearLabel non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  NearLabel done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    movl(Operand(kScratchRegister, 0), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      incl(operand);
    } else {
      addl(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      decl(operand);
    } else {
      subl(operand, Immediate(value));
    }
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  xor_(rax, rax);  // No arguments.
  movq(rbx, ExternalReference(Runtime::kDebugBreak));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif  // ENABLE_DEBUGGER_SUPPORT


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  NearLabel done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  NearLabel done;
  Register dummy = rax;
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  // Load the entry point of the function's code: kCodeEntryOffset holds the
  // address just past the Code object header, i.e. the start of the
  // executable code.
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag);
}
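
// A hypothetical call site (illustrative only): invoking a JS function whose
// function object is already in rdi, with two arguments pushed on the stack:
//   ParameterCount actual(2);
//   masm->InvokeFunction(rdi, actual, CALL_FUNCTION);
// InvokePrologue (called through InvokeCode) arranges argument adaptation
// when the actual count does not match the formal parameter count loaded
// into rbx above.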


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and set up the context.
  Move(rdi, Handle<JSFunction>(function));
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (FLAG_debug_code) {
    movq(kScratchRegister,
         Factory::undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  if (save_rax) {
    movq(r14, rax);  // Back up rax before we use it.
  }

  movq(rax, rbp);
  store_rax(c_entry_fp_address);
  movq(rax, rsi);
  store_rax(context_address);
}
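
// Exit frame layout established above, relative to the new rbp (matching
// the ExitFrameConstants asserts):
//   rbp + 2 * kPointerSize : caller's stack (kCallerSPDisplacement)
//   rbp + 1 * kPointerSize : return address
//   rbp + 0                : saved caller rbp
//   rbp - 1 * kPointerSize : saved entry sp slot (patched in the epilogue)
//   rbp - 2 * kPointerSize : code object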


void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  if (arg_stack_space > 0) {
    subq(rsp, Immediate(arg_stack_space * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}


void MacroAssembler::EnterExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r12. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r12, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space);
}


void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space);
}


void MacroAssembler::LeaveExitFrame() {
  // Registers:
  // r12 : argv

  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Pop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r12, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(rcx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveApiExitFrame() {
  movq(rsp, rbp);
  pop(rbp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  movq(kScratchRegister, context_address);
  movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
  movq(Operand(kScratchRegister, 0), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  movq(kScratchRegister, c_entry_fp_address);
  movq(Operand(kScratchRegister, 0), Immediate(0));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // Preserve original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    movq(kScratchRegister, new_space_allocation_top);
    cmpq(result, Operand(kScratchRegister, 0));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    movq(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else if (result.is(rax)) {
    load_rax(new_space_allocation_top);
  } else {
    movq(kScratchRegister, new_space_allocation_top);
    movq(result, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top.
  if (result_end.is(rax)) {
    // rax can be stored directly to a memory location.
    store_rax(new_space_allocation_top);
  } else {
    // Register required - use scratch provided if available.
    if (scratch.is_valid()) {
      movq(Operand(scratch, 0), result_end);
    } else {
      movq(kScratchRegister, new_space_allocation_top);
      movq(Operand(kScratchRegister, 0), result_end);
    }
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  Register top_reg = result_end.is_valid() ? result_end : result;

  if (top_reg.is(result)) {
    addq(top_reg, Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(top_reg, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      subq(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subq(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    // Tag the result if requested.
    addq(result, Immediate(kHeapObjectTag));
  }
}
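
// A hypothetical call site (illustrative only; the registers and the size
// are placeholders): allocating a tagged two-word object, with rax receiving
// the tagged pointer and rbx used for the computed top:
//   Label gc_required;
//   masm->AllocateInNewSpace(2 * kPointerSize, rax, rbx, no_reg,
//                            &gc_required, TAG_OBJECT);
// On the gc_required path the caller typically falls back to a runtime
// allocation.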


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  }
  addq(result_end, result);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  movq(kScratchRegister, new_space_allocation_top);
#ifdef DEBUG
  cmpq(object, Operand(kScratchRegister, 0));
  Check(below, "Undo allocation of non allocated memory");
#endif
  movq(Operand(kScratchRegister, 0), object);
}
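
// Undoing works by rewinding the allocation top to the object's start, so it
// is only valid for the most recently allocated object: if anything else has
// been allocated since, resetting top here would release that memory too.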


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask + kHeaderAlignment.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
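
// The size computation above aligns the total object size while only having
// the character count in a register: it adds kObjectAlignmentMask plus the
// header's misalignment (kHeaderAlignment) to 2 * length, masks down to an
// alignment boundary, and subtracts kHeaderAlignment again, so that header
// plus body together come out as a multiple of the object alignment.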


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate a cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate an ascii cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Context is the current function context.
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64 stack slots are reserved by the caller for all arguments
  // including the ones passed in registers, and space is always allocated for
  // the four register arguments even if the function takes fewer than four
  // arguments.
  // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
  // and the caller does not reserve stack slots for them.
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  static const int kMinimumStackSlots = 4;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  static const int kRegisterPassedArguments = 6;
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}
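
// Worked examples of the slot counts above: for num_arguments == 3, Windows
// reserves 4 slots (the shadow-space minimum) while the AMD64 ABI reserves
// none, since all three arguments travel in registers; for num_arguments ==
// 8, Windows reserves 8 slots and the AMD64 ABI reserves 2, for the seventh
// and eighth arguments.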


void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);
  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  movq(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}
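
// A hypothetical pairing of the helpers above (illustrative only; ref stands
// for an ExternalReference to the target C function and is not defined
// here):
//   masm->PrepareCallCFunction(2);
//   // ...move the two arguments into rdi and rsi (AMD64) or rcx and rdx
//   // (Windows)...
//   masm->CallCFunction(ref, 2);
// PrepareCallCFunction saves the old rsp in the slot just above the argument
// area, and CallCFunction restores it from there, undoing the alignment.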


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

} } // namespace v8::internal

#endif // V8_TARGET_ARCH_X64