// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}
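
// Usage sketch (illustrative only, not code from this file): callers
// typically bind the overflow label to a slow path that triggers the
// stack guard, e.g.
//
//   Label stack_overflow;
//   StackLimitCheck(&stack_overflow);
//   ... fast path ...
//   bind(&stack_overflow);
//   ... enter a runtime slow path that handles the overflow ...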


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    NearLabel not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr; see
  // Page::GetRegionNumberForAddress for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));

  // Set the dirty mark for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
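
// Sketch of the arithmetic above (assuming power-of-two pages divided into
// 2^kRegionSizeLog2-byte regions, which is what the Page constants encode):
//   page_start = object & ~kPageAlignmentMask
//   region     = (addr >> kRegionSizeLog2)
//                    & (kPageAlignmentMask >> kRegionSizeLog2)
// The bts instruction then sets bit 'region' in the page's dirty-mark word
// at Page::kDirtyFlagOffset.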


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (FLAG_debug_code) {
    NearLabel okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    NearLabel ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  NearLabel L;
  j(cc, &L);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    NearLabel alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if the stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  NearLabel ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
         RelocInfo::CODE_TARGET);
  }
  return result;
}
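
// Note on the Try* variants: when the stub's code object cannot be
// allocated, they return the failure instead of calling or jumping, so the
// caller can handle the allocation failure (for example by retrying after a
// GC) rather than crashing.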


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
        RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later. However as the new key is the numeric value of a string
  // key there is no difference in using either key.
  Integer32ToSmi(index, hash);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PrepareCallApiFunction(int stack_space) {
  EnterApiExitFrame(stack_space, 0);
}


void MacroAssembler::CallApiFunctionAndReturn(ApiFunction* function) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address();

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = kSmiConstantRegister;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax,
       reinterpret_cast<int64_t>(function->address()),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#ifdef _WIN64
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero. Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);
  InitializeSmiConstantRegister();

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), Factory::the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveExitFrame();
  ret(0);

  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  bind(&empty_result);
  // It was zero; the result is undefined.
  Move(rax, Factory::undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
  movq(rax, ExternalReference::delete_handle_scope_extensions());
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}
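
// Note: the xorl and movl cases above rely on 32-bit operations on x64
// zero-extending into the full 64-bit register; they give shorter encodings
// than a 64-bit movq with an immediate.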

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}


// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.
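//
// On x64 (in this code) a smi keeps its 32-bit value in the upper half of
// the 64-bit word, with the low bits zero: kSmiShift is 32, so tagging is a
// shift left by kSmiShift, and field helpers such as Integer32ToSmiField can
// touch just the upper 32 bits (at byte offset kSmiShift / kBitsPerByte).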

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (FLAG_debug_code) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      NearLabel ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  if (source->value() == 0) {
    xorl(dst, dst);
    return;
  }
  int value = source->value();
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}
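
// The lea-based cases above exploit kSmiConstantRegister holding the smi
// constant 1: for example, Operand(reg, reg, times_8, 0) computes
// reg + 8 * reg, i.e. the smi constant 9, without loading a 64-bit
// immediate.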


void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (FLAG_debug_code) {
    testb(dst, Immediate(0x01));
    NearLabel ok;
    j(zero, &ok);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}
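
// Untagging (an arithmetic shift right by kSmiShift) and multiplying by
// 2^power are fused into a single shift whose direction depends on whether
// power is below or above kSmiShift.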


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Make mask 0x8000000000000001 and test that both bits are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}
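
// The rol by one moves the sign bit (bit 63) down to bit 0 and the smi tag
// bit up to bit 1, so a single testb against 3 checks "is a smi" and
// "is non-negative" at once.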


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}
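
// Summing the two words with leal works because smis have their low two tag
// bits clear: the sum has nonzero low bits exactly when at least one operand
// carries a heap-object tag.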


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    addq(dst, src2);
  } else {
    movq(dst, src1);
    addq(dst, src2);
  }
  Assert(no_overflow, "Smi addition overflow");
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
  }
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (dst.is(src1)) {
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
  }
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero
  // afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  NearLabel result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // The shift amount is specified by the lower 5 bits, not six as for the
  // shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}
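
// The returned SmiIndex pairs the adjusted register with a scale factor for
// use in a memory operand; with kSmiShift at 32, converting a smi into
// (value << shift) is a single right or left shift of the tagged word.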
1206
Steve Blocka7e24c12009-10-30 11:49:00 +00001207SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1208 Register src,
1209 int shift) {
1210 // Register src holds a positive smi.
1211 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001212 if (!dst.is(src)) {
1213 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001214 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001215 neg(dst);
Steve Block3ce2e202009-11-05 08:53:23 +00001216 if (shift < kSmiShift) {
1217 sar(dst, Immediate(kSmiShift - shift));
1218 } else {
1219 shl(dst, Immediate(shift - kSmiShift));
1220 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001221 return SmiIndex(dst, times_1);
1222}
1223
1224
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001225void MacroAssembler::Move(Register dst, Register src) {
1226 if (!dst.is(src)) {
1227 movq(dst, src);
Steve Block6ded16b2010-05-10 14:33:55 +01001228 }
Steve Block6ded16b2010-05-10 14:33:55 +01001229}
1230
1231
Steve Block6ded16b2010-05-10 14:33:55 +01001232
1233
Steve Blocka7e24c12009-10-30 11:49:00 +00001234void MacroAssembler::Move(Register dst, Handle<Object> source) {
1235 ASSERT(!source->IsFailure());
1236 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001237 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001238 } else {
1239 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1240 }
1241}
1242
1243
1244void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001245 ASSERT(!source->IsFailure());
Steve Blocka7e24c12009-10-30 11:49:00 +00001246 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001247 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001248 } else {
1249 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1250 movq(dst, kScratchRegister);
1251 }
1252}
1253
1254
1255void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001256 if (source->IsSmi()) {
1257 SmiCompare(dst, Smi::cast(*source));
1258 } else {
1259 Move(kScratchRegister, source);
1260 cmpq(dst, kScratchRegister);
1261 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001262}
1263
1264
1265void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1266 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001267 SmiCompare(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001268 } else {
1269 ASSERT(source->IsHeapObject());
1270 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1271 cmpq(dst, kScratchRegister);
1272 }
1273}
1274
1275
1276void MacroAssembler::Push(Handle<Object> source) {
1277 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001278 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001279 } else {
1280 ASSERT(source->IsHeapObject());
1281 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1282 push(kScratchRegister);
1283 }
1284}
1285
1286
1287void MacroAssembler::Push(Smi* source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001288 intptr_t smi = reinterpret_cast<intptr_t>(source);
1289 if (is_int32(smi)) {
1290 push(Immediate(static_cast<int32_t>(smi)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001291 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001292 Register constant = GetSmiConstant(source);
1293 push(constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001294 }
1295}
1296
1297
Leon Clarkee46be812010-01-19 14:06:41 +00001298void MacroAssembler::Drop(int stack_elements) {
1299 if (stack_elements > 0) {
1300 addq(rsp, Immediate(stack_elements * kPointerSize));
1301 }
1302}
1303
1304
Steve Block3ce2e202009-11-05 08:53:23 +00001305void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}


void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}


void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode);
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  // Unlink this handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  pop(Operand(kScratchRegister, 0));
  // Remove the remaining fields.
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::FCmp() {
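  // fucomip compares st(0) with st(1), sets EFLAGS accordingly and pops
  // st(0); the fstp below pops the remaining operand, so both compared
  // values are removed from the FPU stack.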
  fucomip();
  fstp(0);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    JumpIfSmi(obj, fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(not_equal, fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  NearLabel ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok);
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(NegateCondition(is_smi), "Operand is a smi");
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  ASSERT(!src.is(kScratchRegister));
  LoadRoot(kScratchRegister, root_value_index);
  cmpq(src, kScratchRegister);
  Check(equal, message);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
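  // Loads the object's map into map and its instance type into
  // instance_type. The returned condition holds exactly when the object
  // is a string.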
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  NearLabel non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  NearLabel done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    movl(Operand(kScratchRegister, 0), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      incl(operand);
    } else {
      addl(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      decl(operand);
    } else {
      subl(operand, Immediate(value));
    }
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  xor_(rax, rax);  // no arguments
  movq(rbx, ExternalReference(Runtime::kDebugBreak));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif  // ENABLE_DEBUGGER_SUPPORT


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  NearLabel done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  NearLabel done;
  Register dummy = rax;
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  // Load the code entry point from the function: this is the start of the
  // executable code, just past the Code object header.
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  Move(rdi, Handle<JSFunction>(function));
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (FLAG_debug_code) {
    movq(kScratchRegister,
         Factory::undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Setup the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  if (save_rax) {
    movq(r14, rax);  // Backup rax before we use it.
  }

  movq(rax, rbp);
  store_rax(c_entry_fp_address);
  movq(rax, rsi);
  store_rax(context_address);
}


void MacroAssembler::EnterExitFrameEpilogue(int result_size,
                                            int argc) {
#ifdef _WIN64
  // Reserve space on stack for result and argument structures, if necessary.
  int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
  // Reserve space for the Arguments object. The Windows 64-bit ABI
  // requires us to pass this structure as a pointer to its location on
  // the stack. The structure contains 2 values.
  int argument_stack_space = argc * kPointerSize;
  // We also need backing space for 4 parameters, even though
  // we only pass one or two parameters, and they are passed in registers.
  int argument_mirror_space = 4 * kPointerSize;
  int total_stack_space =
      argument_mirror_space + argument_stack_space + result_stack_space;
  subq(rsp, Immediate(total_stack_space));
#endif

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}


void MacroAssembler::EnterExitFrame(int result_size) {
  EnterExitFramePrologue(true);

  // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r12, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(result_size, 2);
}


void MacroAssembler::EnterApiExitFrame(int stack_space,
                                       int argc,
                                       int result_size) {
  EnterExitFramePrologue(false);

  // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r12, Operand(rbp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(result_size, argc);
}


void MacroAssembler::LeaveExitFrame(int result_size) {
  // Registers:
  // r12 : argv

  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Pop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r12, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  movq(kScratchRegister, context_address);
  movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
  movq(Operand(kScratchRegister, 0), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(rcx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  movq(kScratchRegister, c_entry_fp_address);
  movq(Operand(kScratchRegister, 0), Immediate(0));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // Preserve original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");
    // Read the first word and compare to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    movq(kScratchRegister, new_space_allocation_top);
    cmpq(result, Operand(kScratchRegister, 0));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    ASSERT(!scratch.is(result_end));
    movq(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else if (result.is(rax)) {
    load_rax(new_space_allocation_top);
  } else {
    movq(kScratchRegister, new_space_allocation_top);
    movq(result, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top.
  if (result_end.is(rax)) {
    // rax can be stored directly to a memory location.
    store_rax(new_space_allocation_top);
  } else {
    // Register required - use scratch provided if available.
    if (scratch.is_valid()) {
      movq(Operand(scratch, 0), result_end);
    } else {
      movq(kScratchRegister, new_space_allocation_top);
      movq(Operand(kScratchRegister, 0), result_end);
    }
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  Register top_reg = result_end.is_valid() ? result_end : result;

  if (top_reg.is(result)) {
    addq(top_reg, Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(top_reg, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      subq(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subq(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    // Tag the result if requested.
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  }
  addq(result_end, result);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  movq(kScratchRegister, new_space_allocation_top);
#ifdef DEBUG
  cmpq(object, Operand(kScratchRegister, 0));
  Check(below, "Undo allocation of non allocated memory");
#endif
  movq(Operand(kScratchRegister, 0), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
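  // kHeaderAlignment is the unaligned tail of the header. Adding it before
  // rounding and subtracting it again afterwards makes the header size plus
  // the rounded-up character bytes come out object-aligned.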
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ascii cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // context is the current function context.
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64 stack slots are reserved by the caller for all arguments
  // including the ones passed in registers, and space is always allocated for
  // the four register arguments even if the function takes fewer than four
  // arguments.
  // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
  // and the caller does not reserve stack slots for them.
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  static const int kMinimumStackSlots = 4;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  static const int kRegisterPassedArguments = 6;
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}


void MacroAssembler::PrepareCallCFunction(int num_arguments) {
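  // The original rsp is saved in the slot just above the argument area so
  // that CallCFunction can restore it after the call returns.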
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);
  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  movq(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64