// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}
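// Note on the root accessors above: kRootRegister (r13) holds a pointer into
// the heap's root list, so any root is a single pointer-sized load away. For
// example, with kPointerSizeLog2 == 3, root index 4 lives at [r13 + 32]; only
// the Operand variant of CompareRoot needs kScratchRegister.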


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    NearLabel not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr; see the
  // Page::GetRegionNumberForAddress method for details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));

  // Set the dirty mark for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
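// A worked trace of the arithmetic above (illustrative): pages are aligned to
// Page::kPageAlignmentMask + 1 bytes, so masking the object pointer with
// ~kPageAlignmentMask yields the page start. The slot address modulo the page
// size, shifted right by kRegionSizeLog2, is the region number, and bts sets
// that region's bit in the page's dirty-mark bitmap at kDirtyFlagOffset.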


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}
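// Illustrative use (a sketch, not a quote from the code generator): after a
// store such as
//   movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rax);
// the generated code calls
//   RecordWrite(rbx, JSObject::kPropertiesOffset, rax, rcx);
// so the dirty region marks stay in sync. Here rcx is a caller-chosen scratch
// register (the 'index' argument, used as an array index only when offset
// is 0).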


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (FLAG_debug_code) {
    NearLabel okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    NearLabel ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  NearLabel L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    NearLabel alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  NearLabel ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
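// Worked example of the tagging trick above (illustrative values): on x64,
// kSmiTag == 0 and kSmiTagMask == 1, so if msg sits at p1 = 0x7f0123456787
// (odd, hence not smi-tagged), then p0 = 0x7f0123456786 passes the IsSmi
// check, and the difference p1 - p0 == 1 is passed separately as
// Smi::FromInt(1). The runtime reconstructs the real pointer as
// p0 + difference.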


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
         RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  MaybeObject* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
        RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later. However as the new key is the numeric value of a string key
  // there is no difference in using either key.
  Integer32ToSmi(index, hash);
}
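// Illustrative example: for a string key like "42", the hash field caches the
// numeric index 42 in the bits selected by String::kArrayIndexValueMask,
// i.e. roughly 42 << String::kHashShift. Masking and shifting recovers 42,
// and Integer32ToSmi leaves Smi::FromInt(42) in the index register.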


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  Runtime::Function* function = Runtime::FunctionForId(id);
  Set(rax, function->nargs);
  movq(rbx, ExternalReference(function));
  CEntryStub ces(1);
  ces.SaveDoubles();
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}
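// The convention used above: rax carries the argument count and rbx the
// runtime entry point; CEntryStub then performs the transition to C++. As a
// sketch, CallRuntime(Runtime::kAbort, 2) expands to roughly
//   Set(rax, 2);
//   movq(rbx, ExternalReference(f));  // f == Runtime::FunctionForId(kAbort)
//   CEntryStub ces(f->result_size);
//   CallStub(&ces);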


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  return TryJumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(ExternalReference(fid),
                                      num_arguments,
                                      result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that it fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#ifdef _WIN64
  // We need to prepare a slot for the result handle on the stack and put
  // a pointer to it into the first arg register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}


MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
    ApiFunction* function, int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address();

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r12;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax,
       reinterpret_cast<int64_t>(function->address()),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#ifdef _WIN64
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero. Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), Factory::the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
                                           0, 1);
  if (result->IsFailure()) {
    return result;
  }

  bind(&empty_result);
  // It was zero; the result is undefined.
  Move(rax, Factory::undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
  movq(rax, ExternalReference::delete_handle_scope_extensions());
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);

  return result;
}
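// A note on the HandleScope bookkeeping above: next_address points at the
// scope's 'next' field, and the limit and level fields are addressed via
// their fixed offsets from it (kLimitOffset, kLevelOffset). If the callback
// allocated enough handles to grow the scope, the saved limit no longer
// matches, and delete_allocated_handles calls back into the runtime to free
// the extension blocks before leaving the exit frame.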


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext, int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  return TryTailCallStub(&ces);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}
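// Set picks the shortest encoding that still produces the right 64-bit value:
// xorl clears the whole register (the upper half is zeroed implicitly), the
// sign-extended 32-bit immediate covers small negatives like -1, movl covers
// values with only the low 32 bits set, and only a value such as
// Set(rax, 0x100000000LL) falls through to the full 10-byte movq with a
// 64-bit immediate.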

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

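// On x64, a smi keeps its 32-bit payload in the upper half of a 64-bit word:
// Smi::FromInt(v) is (int64_t)v << kSmiShift with kSmiShift == 32, and the
// low bit (kSmiTag == 0, kSmiTagMask == 1) stays clear. For example,
// Smi::FromInt(5) is 0x0000000500000000, so untagging is a plain arithmetic
// shift right by 32, as SmiToInteger64 below shows.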
Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (FLAG_debug_code) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      NearLabel ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  if (source->value() == 0) {
    xorl(dst, dst);
    return;
  }
  int value = source->value();
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}
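// The switch above synthesizes small smi constants from kSmiConstantRegister,
// which is kept loaded with Smi::FromInt(kSmiConstantRegisterValue), i.e.
// Smi(1). Because lea computes base + index * scale, the case-5 variant
// yields Smi(1) + Smi(1) * 4 == Smi(5) in one short instruction instead of a
// 10-byte 64-bit immediate load.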


void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (FLAG_debug_code) {
    testb(dst, Immediate(0x01));
    NearLabel ok;
    j(zero, &ok);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}
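// Since a positive smi already holds value << 32, multiplying by 2^power just
// adjusts the shift (an illustrative trace): for power == 3, sar by
// kSmiShift - 3 == 29 turns value << 32 into value << 3 == value * 8 without
// any untag/multiply/retag sequence.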


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckSmi(const Operand& src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Make mask 0x8000000000000001 and test that both bits are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}
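// How the rotate trick works (illustrative): rol by one moves the sign bit
// (bit 63) into bit 0 and the smi tag bit (bit 0) into bit 1, so a single
// testb against 3 checks "is a smi" and "is non-negative" at the same time.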


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}


void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result; it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    addq(dst, src2);
  } else {
    movq(dst, src1);
    addq(dst, src2);
  }
  Assert(no_overflow, "Smi addition overflow");
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
  }
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (dst.is(src1)) {
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
  }
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}
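// A trace of SmiNot (illustrative): movl writes 0x00000000FFFFFFFF into
// kScratchRegister (movl zero-extends), and the xor_ or lea sets the low 32
// tag/padding bits of the operand to ones without touching the payload. The
// final not_ flips those bits back to zero while inverting the 32-bit value
// in the upper half, leaving a correctly tagged smi of ~value.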


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    Set(dst, 0);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  NearLabel result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // The shift amount is specified by the lower 5 bits, not six as for the
  // shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}
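// The orl/sar_cl pair above untags and shifts in one instruction (a worked
// trace): rcx holds the untagged shift amount s (0..31), so rcx | 32 makes
// sar_cl shift by 32 + s, turning value << 32 into value >> s; the final
// shl by kSmiShift re-tags the result as a smi.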
1273
1274
SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
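  // push with an Immediate encodes a 32-bit operand that the CPU
  // sign-extends to 64 bits, so it is only usable when the tagged value
  // fits in an int32; larger smi constants go through a register.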
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


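// The smi payload occupies the upper half of the word, so the operand is
// offset by kIntSize and only 32 bits are tested against the untagged
// value.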
void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}


void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}


void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode);
}


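// Saves every general register that is not reserved for a fixed purpose;
// Popad below must pop in exactly the reverse order, and Dropad must drop
// the same number of slots.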
void MacroAssembler::Pushad() {
  push(rax);
  push(rcx);
  push(rdx);
  push(rbx);
  // Not pushing rsp or rbp.
  push(rsi);
  push(rdi);
  push(r8);
  push(r9);
  // r10 is kScratchRegister.
  push(r11);
  push(r12);
  // r13 is kRootRegister.
  push(r14);
  // r15 is kSmiConstantRegister.
}


void MacroAssembler::Popad() {
  pop(r14);
  pop(r12);
  pop(r11);
  pop(r9);
  pop(r8);
  pop(rdi);
  pop(rsi);
  pop(rbx);
  pop(rdx);
  pop(rcx);
  pop(rax);
}


void MacroAssembler::Dropad() {
  const int kRegistersPushedByPushad = 11;
  addq(rsp, Immediate(kRegistersPushedByPushad * kPointerSize));
}


// Order general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14.
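// An entry of -1 marks a register that Pushad does not save (rsp, rbp,
// r10, r13 and r15); the other entries give each register's index within
// the pushed block.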
int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,
    1,
    2,
    3,
    -1,
    -1,
    4,
    5,
    6,
    7,
    -1,
    8,
    9,
    -1,
    10,
    -1
};


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

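  // The finished handler, from higher to lower addresses: return pc,
  // state, frame pointer (or NULL for JS entry frames), and the link to
  // the next handler.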
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  // Unlink this handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  pop(Operand(kScratchRegister, 0));
  // Remove the remaining fields.
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    addq(rsp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


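// Compares the two values on top of the x87 stack and pops both:
// fucomip sets EFLAGS and pops once, and fstp(0) discards the remaining
// value.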
void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    JumpIfSmi(obj, fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(not_equal, fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  NearLabel ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok);
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(NegateCondition(is_smi), "Operand is a smi");
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  ASSERT(!src.is(kScratchRegister));
  LoadRoot(kScratchRegister, root_value_index);
  cmpq(src, kScratchRegister);
  Check(equal, message);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  NearLabel non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  NearLabel done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    movl(Operand(kScratchRegister, 0), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      incl(operand);
    } else {
      addl(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      decl(operand);
    } else {
      subl(operand, Immediate(value));
    }
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  Set(rax, 0);  // No arguments.
  movq(rbx, ExternalReference(Runtime::kDebugBreak));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif  // ENABLE_DEBUGGER_SUPPORT


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  NearLabel done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  NearLabel done;
  Register dummy = rax;
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and set up the context.
  Move(rdi, Handle<JSFunction>(function));
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  if (V8::UseCrankshaft()) {
    // Since Crankshaft can recompile a function, we need to load
    // the Code object every time we call the function.
    movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
    ParameterCount expected(function->shared()->formal_parameter_count());
    InvokeCode(rdx, expected, actual, flag);
  } else {
    // Invoke the cached code.
    Handle<Code> code(function->code());
    ParameterCount expected(function->shared()->formal_parameter_count());
    InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
  }
}


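// After EnterFrame the stack holds, from rbp downwards: the saved frame
// pointer, the context (rsi), the frame-type smi, and the code object for
// the generated code.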
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (FLAG_debug_code) {
    movq(kScratchRegister,
         Factory::undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  if (save_rax) {
    movq(r14, rax);  // Back up rax before we use it.
  }

  movq(rax, rbp);
  store_rax(c_entry_fp_address);
  movq(rax, rsi);
  store_rax(context_address);
}


void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize +
        arg_stack_space * kPointerSize;
    subq(rsp, Immediate(space));
    int offset = -2 * kPointerSize;
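    // The save area sits just below the code-object slot that
    // EnterExitFramePrologue pushed at rbp - 2 * kPointerSize.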
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subq(rsp, Immediate(arg_stack_space * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}


void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r12. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r12, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Registers:
  // r12 : argv
  if (save_doubles) {
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }
  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Drop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r12, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(rcx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveApiExitFrame() {
  movq(rsp, rbp);
  pop(rbp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  movq(kScratchRegister, context_address);
  movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
  movq(Operand(kScratchRegister, 0), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  movq(kScratchRegister, c_entry_fp_address);
  movq(Operand(kScratchRegister, 0), Immediate(0));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // Preserve original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    movq(kScratchRegister, new_space_allocation_top);
    cmpq(result, Operand(kScratchRegister, 0));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    movq(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else if (result.is(rax)) {
    load_rax(new_space_allocation_top);
  } else {
    movq(kScratchRegister, new_space_allocation_top);
    movq(result, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top.
  if (result_end.is(rax)) {
    // rax can be stored directly to a memory location.
    store_rax(new_space_allocation_top);
  } else {
    // Register required - use scratch provided if available.
    if (scratch.is_valid()) {
      movq(Operand(scratch, 0), result_end);
    } else {
      movq(kScratchRegister, new_space_allocation_top);
      movq(Operand(kScratchRegister, 0), result_end);
    }
  }
}


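// New-space allocation is a bump-pointer scheme: load the current
// allocation top, advance it by the requested size, and compare against
// the allocation limit, bailing out to gc_required on overflow or when
// the limit is exceeded.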
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    movq(top_reg, result);
  }
  addq(top_reg, Immediate(object_size));
  j(carry, gc_required);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(top_reg, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      subq(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subq(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    // Tag the result if requested.
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  // We assume that element_count*element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  addq(result_end, result);
  j(carry, gc_required);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  }
  addq(result_end, result);
  j(carry, gc_required);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  movq(kScratchRegister, new_space_allocation_top);
#ifdef DEBUG
  cmpq(object, Operand(kScratchRegister, 0));
  Check(below, "Undo allocation of non-allocated memory");
#endif
  movq(Operand(kScratchRegister, 0), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = 2 * length, rounded up so that the header plus the character
  // data is object-aligned.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                        kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate a cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate an ascii cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Context is the current function context.
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  movq(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (FLAG_debug_code) {
    Label ok, fail;
    CheckMap(map, Factory::meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64 stack slots are reserved by the caller for all arguments
  // including the ones passed in registers, and space is always allocated for
  // the four register arguments even if the function takes fewer than four
  // arguments.
  // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
  // and the caller does not reserve stack slots for them.
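  // For example, with num_arguments == 7 this returns 7 on Windows (all
  // seven arguments get stack slots) and 1 on AMD64 (one argument beyond
  // the six passed in registers).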
Leon Clarke4515c472010-02-03 11:58:03 +00002429 ASSERT(num_arguments >= 0);
2430#ifdef _WIN64
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002431 static const int kMinimumStackSlots = 4;
2432 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2433 return num_arguments;
Leon Clarke4515c472010-02-03 11:58:03 +00002434#else
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002435 static const int kRegisterPassedArguments = 6;
2436 if (num_arguments < kRegisterPassedArguments) return 0;
2437 return num_arguments - kRegisterPassedArguments;
Leon Clarke4515c472010-02-03 11:58:03 +00002438#endif
Leon Clarke4515c472010-02-03 11:58:03 +00002439}
2440
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002441
Leon Clarke4515c472010-02-03 11:58:03 +00002442void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2443 int frame_alignment = OS::ActivationFrameAlignment();
2444 ASSERT(frame_alignment != 0);
2445 ASSERT(num_arguments >= 0);
2446 // Make stack end at alignment and allocate space for arguments and old rsp.
2447 movq(kScratchRegister, rsp);
2448 ASSERT(IsPowerOf2(frame_alignment));
2449 int argument_slots_on_stack =
2450 ArgumentStackSlotsForCFunctionCall(num_arguments);
2451 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2452 and_(rsp, Immediate(-frame_alignment));
2453 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2454}
2455
2456
2457void MacroAssembler::CallCFunction(ExternalReference function,
2458 int num_arguments) {
2459 movq(rax, function);
2460 CallCFunction(rax, num_arguments);
2461}
2462
2463
2464void MacroAssembler::CallCFunction(Register function, int num_arguments) {
Steve Block6ded16b2010-05-10 14:33:55 +01002465 // Check stack alignment.
2466 if (FLAG_debug_code) {
2467 CheckStackAlignment();
2468 }
2469
Leon Clarke4515c472010-02-03 11:58:03 +00002470 call(function);
2471 ASSERT(OS::ActivationFrameAlignment() != 0);
2472 ASSERT(num_arguments >= 0);
2473 int argument_slots_on_stack =
2474 ArgumentStackSlotsForCFunctionCall(num_arguments);
2475 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2476}
2477
Steve Blockd0582a62009-12-15 09:54:21 +00002478
Steve Blocka7e24c12009-10-30 11:49:00 +00002479CodePatcher::CodePatcher(byte* address, int size)
2480 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2481 // Create a new macro assembler pointing to the address of the code to patch.
2482 // The size is adjusted with kGap on order for the assembler to generate size
2483 // bytes of instructions without failing with buffer size constraints.
2484 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2485}
2486
2487
2488CodePatcher::~CodePatcher() {
2489 // Indicate that code has changed.
2490 CPU::FlushICache(address_, size_);
2491
2492 // Check that the code was patched as expected.
2493 ASSERT(masm_.pc_ == address_ + size_);
2494 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2495}
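

// A minimal usage sketch (hypothetical address and size; assumes the
// masm() accessor and the int3() instruction emitter exist on this
// MacroAssembler):
//
//   CodePatcher patcher(address, 1);  // Patch one byte at |address|.
//   patcher.masm()->int3();           // Overwrite it with a breakpoint.
//
// The destructor then flushes the instruction cache and verifies that
// exactly |size| bytes were emitted.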

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64