// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    NearLabel not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr. See the
  // Page::GetRegionNumberForAddress method for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));
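  // For example, assuming 8 KB pages split into 32 regions of 256 bytes
  // (kRegionSizeLog2 == 8), a page offset of 0x1234 shifts down to 0x12,
  // so the bts below would set bit 18 of the page's dirty-marks word.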

  // Set dirty mark for region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (FLAG_debug_code) {
    NearLabel okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner
    // as KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    NearLabel ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  NearLabel L;
  j(cc, &L);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    NearLabel alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
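  // A zero result with a negative operand means the true result was
  // negative zero, which an integer cannot represent, so we jump to
  // then_label in that case.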
  NearLabel ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
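  // For example, if msg were at 0x100005, p0 below becomes 0x100004 (the
  // tag bit is cleared, so the GC treats it as a smi) and the difference 1
  // is passed alongside as Smi::FromInt(1), letting the runtime recover
  // the original pointer.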
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later. However, as the new key is the numeric value of a string
  // key, there is no difference in using either key.
  Integer32ToSmi(index, hash);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PushHandleScope(Register scratch) {
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  const int kExtensionsOffset = 0;
  const int kNextOffset = Offset(
      ExternalReference::handle_scope_next_address(),
      extensions_address);
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      extensions_address);

  // Push the number of extensions, smi-tagged so the gc will ignore it.
  movq(kScratchRegister, extensions_address);
  movq(scratch, Operand(kScratchRegister, kExtensionsOffset));
  movq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0));
  Integer32ToSmi(scratch, scratch);
  push(scratch);
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  push(Operand(kScratchRegister, kNextOffset));
  push(Operand(kScratchRegister, kLimitOffset));
}


Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  const int kExtensionsOffset = 0;
  const int kNextOffset = Offset(
      ExternalReference::handle_scope_next_address(),
      extensions_address);
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      extensions_address);

  Object* result = NULL;
  Label write_back;
  movq(kScratchRegister, extensions_address);
  cmpq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0));
  j(equal, &write_back);
  push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  pop(saved);
  movq(kScratchRegister, extensions_address);

  bind(&write_back);
  pop(Operand(kScratchRegister, kLimitOffset));
  pop(Operand(kScratchRegister, kNextOffset));
  pop(scratch);
  SmiToInteger32(scratch, scratch);
  movq(Operand(kScratchRegister, kExtensionsOffset), scratch);

  return result;
}


void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}


Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

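// On x64 a smi stores its 32-bit payload in the upper half of the word:
// Smi::FromInt(n) is represented as static_cast<int64_t>(n) << kSmiShift,
// with kSmiShift == 32 and a zero tag, so e.g. Smi::FromInt(5) is
// 0x0000000500000000. This is why the field variants below can read or
// write the payload with a plain 32-bit access at offset
// kSmiShift / kBitsPerByte.
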
Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (FLAG_debug_code) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      NearLabel ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  if (source->value() == 0) {
    xorl(dst, dst);
    return;
  }
  int value = source->value();
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

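  // kSmiConstantRegister permanently holds Smi::FromInt(1), so several
  // small constants below are formed with a single lea; e.g. for
  // uvalue == 5, lea(dst, [reg + reg * 4]) yields 5 * Smi::FromInt(1),
  // which is Smi::FromInt(5).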
  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}


void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (FLAG_debug_code) {
    testb(dst, Immediate(0x01));
    NearLabel ok;
    j(zero, &ok);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Rotating left by one moves the sign bit to bit 0 and the tag bit to
  // bit 1, so testing the low two bits checks sign and tag at once.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
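  // A smi has its low 32 bits clear while a heap object has tag 01 in its
  // two low bits, so the low two bits of first + second are 00 only when
  // both operands are smis (01 + 01 carries to 10).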
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // Subtracting Smi::FromInt(1), held in kSmiConstantRegister, overflows
  // only for the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    addq(dst, src2);
  } else {
    movq(dst, src1);
    addq(dst, src2);
  }
  Assert(no_overflow, "Smi addition overflow");
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
  }
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (dst.is(src1)) {
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
  }
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // The shift amount is taken from the lower five bits only, not six as
  // the 64-bit shl opcode would use.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
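  // The result holds the smi's value multiplied by 2^shift: the payload
  // already sits at bit kSmiShift, so shifting by the difference leaves
  // value << shift, ready for use as an untagged, pre-scaled index.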
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}

SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Test(const Operand& src, Smi* source) {
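  // The smi's payload occupies the upper 32 bits of the word, so testing
  // the 32 bits at offset kIntSize is sufficient.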
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}


void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}


void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode);
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

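  // The completed handler looks like this, from lowest stack address up:
  //   rsp[0] : next handler (old contents of Top::k_handler_address)
  //   rsp[8] : frame pointer (rbp, or NULL for JS-entry frames)
  //   rsp[16]: state (ENTRY, TRY_CATCH or TRY_FINALLY)
  //   rsp[24]: return address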
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  // Unlink this handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  pop(Operand(kScratchRegister, 0));
  // Remove the remaining fields.
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::FCmp() {
Steve Block3ce2e202009-11-05 08:53:23 +00001385 fucomip();
Steve Block8defd9f2010-07-08 12:39:36 +01001386 fstp(0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001387}
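

// A hedged sketch of how FCmp results are consumed (hypothetical labels,
// not from this file): fucomip compares the two top x87 slots, sets
// ZF/PF/CF directly and pops once; fstp(0) drops the remaining operand.
//
//   masm->FCmp();
//   masm->j(parity_even, &unordered);  // PF set: at least one NaN.
//   masm->j(above, &greater);          // CF == 0 and ZF == 0.
//   masm->j(below, &less);             // CF == 1.
//   masm->j(equal, &equal_values);     // ZF == 1.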


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    JumpIfSmi(obj, fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(not_equal, fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  NearLabel ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok);
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand is not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(NegateCondition(is_smi), "Operand is a smi");
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  ASSERT(!src.is(kScratchRegister));
  LoadRoot(kScratchRegister, root_value_index);
  cmpq(src, kScratchRegister);
  Check(equal, message);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  NearLabel non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  NearLabel done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    movl(Operand(kScratchRegister, 0), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      incl(operand);
    } else {
      addl(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      decl(operand);
    } else {
      subl(operand, Immediate(value));
    }
  }
}
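

// An illustrative sketch (hypothetical counter name, not from this file):
// with --native_code_counters enabled, generated code bumps the counter
// cell in place, so a stub might record an event like this:
//
//   masm->IncrementCounter(&Counters::my_counter, 1);  // incl [cell]
//   masm->DecrementCounter(&Counters::my_counter, 5);  // subl [cell], 5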


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  xor_(rax, rax);  // no arguments
  movq(rbx, ExternalReference(Runtime::kDebugBreak));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif  // ENABLE_DEBUGGER_SUPPORT


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  NearLabel done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  NearLabel done;
  Register dummy = rax;
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  // Load rdx with the start of the executable code; the code entry slot
  // already points past the Code object header.
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and set up the context.
  Move(rdi, Handle<JSFunction>(function));
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (FLAG_debug_code) {
    movq(kScratchRegister,
         Factory::undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  if (save_rax) {
    movq(r14, rax);  // Back up rax before we use it.
  }

  movq(rax, rbp);
  store_rax(c_entry_fp_address);
  movq(rax, rsi);
  store_rax(context_address);
}


void MacroAssembler::EnterExitFrameEpilogue(int result_size,
                                            int argc) {
#ifdef _WIN64
  // Reserve space on stack for result and argument structures, if necessary.
  int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
  // Reserve space for the Arguments object. The Windows 64-bit ABI
  // requires us to pass this structure as a pointer to its location on
  // the stack. The structure contains 2 values.
  int argument_stack_space = argc * kPointerSize;
  // We also need backing space for 4 parameters, even though we only
  // pass one or two parameters, and they are passed in registers.
  int argument_mirror_space = 4 * kPointerSize;
  int total_stack_space =
      argument_mirror_space + argument_stack_space + result_stack_space;
  subq(rsp, Immediate(total_stack_space));
#endif

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
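

// A worked example of the Win64 reservation above (illustrative only):
// with result_size == 2 and argc == 2, the epilogue reserves
//   result_stack_space    = 2 * kPointerSize = 16 bytes,
//   argument_stack_space  = 2 * kPointerSize = 16 bytes,
//   argument_mirror_space = 4 * kPointerSize = 32 bytes,
// i.e. subq(rsp, Immediate(64)), before rsp is rounded down to the OS
// activation frame alignment.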


void MacroAssembler::EnterExitFrame(int result_size) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r12. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r12, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(result_size, 2);
}


void MacroAssembler::EnterApiExitFrame(int stack_space,
                                       int argc,
                                       int result_size) {
  EnterExitFramePrologue(false);

  // Set up argv in callee-saved register r12. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r12, Operand(rbp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(result_size, argc);
}


void MacroAssembler::LeaveExitFrame(int result_size) {
  // Registers:
  // r12 : argv

  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Pop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r12, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  movq(kScratchRegister, context_address);
  movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
  movq(Operand(kScratchRegister, 0), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(rcx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  movq(kScratchRegister, c_entry_fp_address);
  movq(Operand(kScratchRegister, 0), Immediate(0));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // Preserve original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    movq(kScratchRegister, new_space_allocation_top);
    cmpq(result, Operand(kScratchRegister, 0));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    ASSERT(!scratch.is(result_end));
    movq(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else if (result.is(rax)) {
    load_rax(new_space_allocation_top);
  } else {
    movq(kScratchRegister, new_space_allocation_top);
    movq(result, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top.
  if (result_end.is(rax)) {
    // rax can be stored directly to a memory location.
    store_rax(new_space_allocation_top);
  } else {
    // Register required - use scratch provided if available.
    if (scratch.is_valid()) {
      movq(Operand(scratch, 0), result_end);
    } else {
      movq(kScratchRegister, new_space_allocation_top);
      movq(Operand(kScratchRegister, 0), result_end);
    }
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  Register top_reg = result_end.is_valid() ? result_end : result;

  if (top_reg.is(result)) {
    addq(top_reg, Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(top_reg, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      subq(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subq(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    // Tag the result if requested.
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  }
  addq(result_end, result);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  movq(kScratchRegister, new_space_allocation_top);
#ifdef DEBUG
  cmpq(object, Operand(kScratchRegister, 0));
  Check(below, "Undo allocation of non allocated memory");
#endif
  movq(Operand(kScratchRegister, 0), object);
}
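

// A hedged sketch of the allocate/undo pairing (hypothetical labels, not
// from this file): a fast path that allocates and then fails a later check
// must reset the allocation top before taking its slow path:
//
//   masm->AllocateInNewSpace(HeapNumber::kSize, rax, rbx, no_reg,
//                            &gc_required, TAG_OBJECT);
//   // ... further checks that can fail ...
//   masm->UndoAllocationInNewSpace(rax);  // Hand the bytes back to top.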


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
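

// A worked example of the size rounding above (illustrative only): with
// 8-byte object alignment (kObjectAlignmentMask == 7) and an already
// aligned SeqTwoByteString header, kHeaderAlignment is 0, so a length of 3
// computes
//   scratch1 = 3 * 2 + 7 = 13;  13 & ~7 = 8
// bytes of character storage, rounded up to object alignment before
// AllocateInNewSpace adds the header size.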


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ascii cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Context is the current function context.
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64 stack slots are reserved by the caller for all arguments
  // including the ones passed in registers, and space is always allocated for
  // the four register arguments even if the function takes fewer than four
  // arguments.
  // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
  // and the caller does not reserve stack slots for them.
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  static const int kMinimumStackSlots = 4;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  static const int kRegisterPassedArguments = 6;
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}
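

// Worked examples of the slot counts above (illustrative only): for
// num_arguments == 3, Windows 64 still reserves the four-slot minimum
// while the AMD64 ABI reserves none; for num_arguments == 8, Windows
// reserves eight slots and AMD64 reserves two (only the arguments beyond
// the six register-passed ones spill to the stack).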


void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);
  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  movq(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}
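

// A minimal call sequence using the helpers above (a sketch; ext_ref is a
// hypothetical ExternalReference to a two-argument C function, and the
// argument registers shown are for the AMD64 ABI):
//
//   masm->PrepareCallCFunction(2);    // Align rsp and save its old value.
//   masm->movq(rdi, rax);             // First C argument.
//   masm->movq(rsi, rbx);             // Second C argument.
//   masm->CallCFunction(ext_ref, 2);  // Call, then restore the saved rsp.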


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
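

// A hedged usage sketch (hypothetical call site, not from this file): the
// patcher wraps a small MacroAssembler over already-generated code and
// flushes the instruction cache when it goes out of scope:
//
//   {
//     CodePatcher patcher(pc, 1);
//     patcher.masm()->int3();  // Overwrite one byte with a breakpoint.
//   }  // Destructor flushes the icache and checks the patch size.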

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64