// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    NearLabel not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr. See the
  // Page::GetRegionNumberForAddress method for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));

  // Set dirty mark for region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (FLAG_debug_code) {
    NearLabel okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    NearLabel ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  NearLabel L;
  j(cc, &L);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    NearLabel alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}

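// Jumps to then_label when 'result' is zero and 'op' is negative. This is
// presumably intended for integer operations where a zero result combined
// with a negative operand means the exact value would have been -0, which
// cannot be represented as an untagged integer.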
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  NearLabel ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
         RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
        RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later. However as the new key is the numeric value of a string key
  // there is no difference in using either key.
  Integer32ToSmi(index, hash);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  return TryJumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(ExternalReference(fid),
                                      num_arguments,
                                      result_size);
}

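// Returns the distance between two external references as a 32-bit offset,
// so that one of them can be addressed relative to a register holding the
// other.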
static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into a 32-bit int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#ifdef _WIN64
  // We need to prepare a slot for the result handle on the stack and put
  // a pointer to it into the first argument register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}

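// Calls an API function and performs the surrounding bookkeeping: a
// HandleScope is opened in callee-saved registers before the call and
// restored afterwards, the returned handle is dereferenced (or replaced
// with undefined if it is empty), scheduled exceptions are promoted, and
// any HandleScope extensions allocated by the callee are deleted before
// the exit frame is left.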
MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
    ApiFunction* function, int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address();

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r12;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax,
       reinterpret_cast<int64_t>(function->address()),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#ifdef _WIN64
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero. Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), Factory::the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
                                           0, 1);
  if (result->IsFailure()) {
    return result;
  }

  bind(&empty_result);
  // It was zero; the result is undefined.
  Move(rax, Factory::undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
  movq(rax, ExternalReference::delete_handle_scope_extensions());
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);

  return result;
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext, int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  return TryTailCallStub(&ces);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

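// Note on the 64-bit smi representation assumed below: the 32-bit payload
// lives in the upper half of the word (kSmiShift is 32) and the low half,
// including the tag bit, is all zeros. Tagging is therefore a single left
// shift, and untagging is a right shift or a 32-bit load from the upper
// half of the field.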
Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

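// Materializes a smi constant while avoiding a 10-byte movq with a 64-bit
// immediate where possible: kSmiConstantRegister is expected to hold the
// smi 1 (kSmiConstantRegisterValue, verified in debug code below), so
// small multiples of the smi unit can be formed with a single lea on that
// register, negated afterwards for negative values.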
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (FLAG_debug_code) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      NearLabel ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  if (source->value() == 0) {
    xorl(dst, dst);
    return;
  }
  int value = source->value();
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}


void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}

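// Writes an untagged int32 straight into a smi field. This relies on the
// field already holding a smi (checked in debug code below), so its low
// 32 bits are zero; storing the value into the high 32 bits then leaves
// exactly the bit pattern of the tagged smi.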
void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (FLAG_debug_code) {
    testb(dst, Immediate(0x01));
    NearLabel ok;
    j(zero, &ok);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}

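// For a positive smi, untagging (a right shift by kSmiShift) and
// multiplying by 2^power (a left shift by power) can be folded into a
// single shift by the difference of the two amounts.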
void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}

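// A non-negative smi has both the sign bit (bit 63) and the tag bit
// (bit 0) clear. Rotating left by one moves the sign bit into bit 0 and
// the tag bit into bit 1, so a single testb against 0x3 checks both
// conditions at once.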
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Make mask 0x8000000000000001 and test that both bits are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}

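// With kSmiTag == 0 and kHeapObjectTag == 1, the low two bits are 00 for
// a smi and 01 for a heap object. The low two bits of the sum computed by
// leal are therefore 00 only when both inputs are smis, so a single testb
// of the sum checks both registers at once.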
Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    addq(dst, src2);
  } else {
    movq(dst, src1);
    addq(dst, src2);
  }
  Assert(no_overflow, "Smi addition overflow");
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
  }
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (dst.is(src1)) {
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
  }
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero
  // afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  NearLabel result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // The shift amount is given by the lower 5 bits, not six as for the shl
  // opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}

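// Or'ing the untagged shift count with kSmiShift lets a single sar_cl
// both untag src1 and apply the requested shift (the count becomes 32
// plus the original rcx & 0x1f); one shl then retags the result. rcx is
// saved in kScratchRegister when it aliases a source, since variable
// shifts must use cl.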
void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}

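// Converts a smi to an index scaled by 2^shift for use in a memory
// operand. The untag shift and the scale shift are folded into a single
// arithmetic shift, so the returned SmiIndex always carries times_1.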
Steve Block3ce2e202009-11-05 08:53:23 +00001235SmiIndex MacroAssembler::SmiToIndex(Register dst,
1236 Register src,
1237 int shift) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001238 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001239 // There is a possible optimization if shift is in the range 60-63, but that
1240 // will (and must) never happen.
1241 if (!dst.is(src)) {
1242 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001243 }
Steve Block3ce2e202009-11-05 08:53:23 +00001244 if (shift < kSmiShift) {
1245 sar(dst, Immediate(kSmiShift - shift));
1246 } else {
1247 shl(dst, Immediate(shift - kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001248 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001249 return SmiIndex(dst, times_1);
1250}
1251
Steve Blocka7e24c12009-10-30 11:49:00 +00001252SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1253 Register src,
1254 int shift) {
1255 // Register src holds a positive smi.
1256 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001257 if (!dst.is(src)) {
1258 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001259 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001260 neg(dst);
Steve Block3ce2e202009-11-05 08:53:23 +00001261 if (shift < kSmiShift) {
1262 sar(dst, Immediate(kSmiShift - shift));
1263 } else {
1264 shl(dst, Immediate(shift - kSmiShift));
1265 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001266 return SmiIndex(dst, times_1);
1267}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Test(const Operand& src, Smi* source) {
  // A smi stores its value in the high 32 bits, so it is enough to test
  // those against the untagged value.
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this.
  jmp(code_object, rmode);
}


void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}


void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode);
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // The code below assumes that a stack handler occupies exactly four
  // words; adjust it if that changes.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  // Unlink this handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  pop(Operand(kScratchRegister, 0));
  // Remove the remaining fields.
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
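

// Illustrative pairing, not from the original file: PushTryHandler and
// PopTryHandler bracket code that may throw. The elided body is a
// placeholder.
//
//   masm->PushTryHandler(IN_JAVASCRIPT, TRY_CATCH_HANDLER);
//   // ... code that may throw ...
//   masm->PopTryHandler();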


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::FCmp() {
  // fucomip compares st(0) with st(1), sets EFLAGS and pops st(0);
  // fstp(0) then pops the remaining operand, leaving the FPU stack empty.
  fucomip();
  fstp(0);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    JumpIfSmi(obj, fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(not_equal, fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  NearLabel ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok);
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(NegateCondition(is_smi), "Operand is a smi");
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  ASSERT(!src.is(kScratchRegister));
  LoadRoot(kScratchRegister, root_value_index);
  cmpq(src, kScratchRegister);
  Check(equal, message);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
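

// Usage sketch (illustrative; registers and the label are arbitrary): the
// returned condition holds when the object is a string, so callers usually
// branch away on its negation.
//
//   Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
//   masm->j(NegateCondition(is_string), &not_string);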


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  NearLabel non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  NearLabel done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: fetch the prototype from the constructor field
  // in the initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    movl(Operand(kScratchRegister, 0), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      incl(operand);
    } else {
      addl(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      decl(operand);
    } else {
      subl(operand, Immediate(value));
    }
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  xor_(rax, rax);  // No arguments.
  movq(rbx, ExternalReference(Runtime::kDebugBreak));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif  // ENABLE_DEBUGGER_SUPPORT


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  NearLabel done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  NearLabel done;
  Register dummy = rax;
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  // Load rdx with the code entry point, which lies just past the Code
  // object header, at the start of the executable code.
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and set up the context.
  Move(rdi, Handle<JSFunction>(function));
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (FLAG_debug_code) {
    movq(kScratchRegister,
         Factory::undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}
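

// For reference (added comment, derived from the pushes above): after
// EnterFrame the stack layout relative to the new rbp is
//
//   rbp + 8  : return address
//   rbp + 0  : saved rbp
//   rbp - 8  : context (rsi)
//   rbp - 16 : frame type marker (smi)
//   rbp - 24 : code object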


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  if (save_rax) {
    movq(r14, rax);  // Back up rax before we use it.
  }

  movq(rax, rbp);
  store_rax(c_entry_fp_address);
  movq(rax, rsi);
  store_rax(context_address);
}


void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space) {
#ifdef _WIN64
  // Windows 64 expects the caller to reserve "shadow space" for the four
  // register arguments.
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  if (arg_stack_space > 0) {
    subq(rsp, Immediate(arg_stack_space * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}


void MacroAssembler::EnterExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r12. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r12, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space);
}


void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space);
}


void MacroAssembler::LeaveExitFrame() {
  // Registers:
  // r12 : argv

  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Pop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r12, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(rcx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveApiExitFrame() {
  movq(rsp, rbp);
  pop(rbp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore the current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  movq(kScratchRegister, context_address);
  movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
  movq(Operand(kScratchRegister, 0), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  movq(kScratchRegister, c_entry_fp_address);
  movq(Operand(kScratchRegister, 0), Immediate(0));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load the current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    // Preserve the original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare it to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    movq(kScratchRegister, new_space_allocation_top);
    cmpq(result, Operand(kScratchRegister, 0));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    movq(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else if (result.is(rax)) {
    load_rax(new_space_allocation_top);
  } else {
    movq(kScratchRegister, new_space_allocation_top);
    movq(result, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top.
  if (result_end.is(rax)) {
    // rax can be stored directly to a memory location.
    store_rax(new_space_allocation_top);
  } else {
    // Register required - use scratch provided if available.
    if (scratch.is_valid()) {
      movq(Operand(scratch, 0), result_end);
    } else {
      movq(kScratchRegister, new_space_allocation_top);
      movq(Operand(kScratchRegister, 0), result_end);
    }
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  Register top_reg = result_end.is_valid() ? result_end : result;

  if (top_reg.is(result)) {
    addq(top_reg, Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(top_reg, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      subq(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subq(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    // Tag the result if requested.
    addq(result, Immediate(kHeapObjectTag));
  }
}
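

// Usage sketch (illustrative; the register choices and the label are
// placeholders): allocate a fixed-size, tagged object and fall back to a
// runtime path when new space is exhausted.
//
//   Label gc_required;
//   masm->AllocateInNewSpace(HeapNumber::kSize, rax, rbx, no_reg,
//                            &gc_required, TAG_OBJECT);
//   // ... initialize the object through rax ...
//   // bind(&gc_required) starts the slow-case allocation.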


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  }
  addq(result_end, result);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  movq(kScratchRegister, new_space_allocation_top);
#ifdef DEBUG
  cmpq(object, Operand(kScratchRegister, 0));
  Check(below, "Undo allocation of non allocated memory");
#endif
  movq(Operand(kScratchRegister, 0), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
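

// Usage sketch (illustrative; register choices are placeholders):
// AllocateHeapNumber only sets the map, so the caller still stores the
// value field itself, e.g. from an XMM register.
//
//   masm->AllocateHeapNumber(rax, rbx, &gc_required);
//   masm->movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);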


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                                                 kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ASCII string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate a cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate a cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {
    // The context is the current function context, which may be an
    // intermediate context, not a function context.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  movq(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (FLAG_debug_code) {
    Label ok, fail;
    CheckMap(map, Factory::meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64 stack slots are reserved by the caller for all arguments
  // including the ones passed in registers, and space is always allocated for
  // the four register arguments even if the function takes fewer than four
  // arguments.
  // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
  // and the caller does not reserve stack slots for them.
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  static const int kMinimumStackSlots = 4;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  static const int kRegisterPassedArguments = 6;
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}
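

// Worked example (added for clarity): with 6 arguments this returns 6 on
// Windows (every argument gets a slot, including the four register
// arguments) and 0 on the AMD64 ABI (all six travel in registers); with 8
// arguments it returns 8 and 2 respectively.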


void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);
  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  movq(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}
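

// Usage sketch (illustrative; the external reference and argument values
// are placeholders): PrepareCallCFunction and CallCFunction come in pairs.
// The former aligns rsp and reserves argument slots, the latter restores
// rsp from the saved value after the call.
//
//   masm->PrepareCallCFunction(2);
//   masm->movq(rdi, first_argument);   // First argument (AMD64 ABI).
//   masm->movq(rsi, second_argument);  // Second argument.
//   masm->CallCFunction(ExternalReference::the_c_function(), 2);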


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
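

// Usage sketch (illustrative; assumes a masm() accessor and a patchable
// code address): a CodePatcher is scoped, and its destructor flushes the
// instruction cache and asserts that exactly `size` bytes were emitted.
//
//   CodePatcher patcher(address, 2);
//   patcher.masm()->nop();
//   patcher.masm()->nop();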

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64