// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  ASSERT(is_int32(~Page::kPageAlignmentMask));
  and_(object,
       Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask)));
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  subq(addr, page_start);
  shr(addr, Immediate(kPointerSizeLog2));
  Register pointer_offset = addr;
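  // (Example: with kPointerSizeLog2 == 3 on x64, a slot 0x40 bytes past the
  // page start gives pointer_offset == 8, i.e. the index of its bit in the
  // remembered set written below.)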

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
  j(below, &fast);

  // We have a large object containing pointers. It must be a FixedArray.

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Load the array length into 'scratch'.
  movl(scratch,
       Operand(page_start,
               Page::kObjectStartOffset + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray), at
  //   page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  lea(page_start,
      Operand(page_start, array_length, times_pointer_size,
              Page::kObjectStartOffset + FixedArray::kHeaderSize
                  - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions
  bind(&fast);
  bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the smi_index register contains the array index into
// the elements array represented as a smi. Otherwise it can be used as a
// scratch register.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register smi_index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !smi_index.is(rsi));

  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have space
  // for the remembered set bits).
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, smi_index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register smi_index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);
  }

  // Test that the object address is not in the new space. We cannot
  // set remembered set bits in the new space.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  // We use optimized write barrier code if the word being written to is not in
  // a large object page, or is in the first "page" of a large object page.
  // We make sure that an offset is inside the right limits whether it is
  // tagged or untagged.
  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize - kHeapObjectTag)) {
    // Compute the bit offset in the remembered set, leave it in 'scratch'.
    lea(scratch, Operand(object, offset));
    ASSERT(is_int32(Page::kPageAlignmentMask));
    and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
    shr(scratch, Immediate(kPointerSizeLog2));

    // Compute the page address from the heap object pointer, leave it in
    // 'object' (immediate value is sign extended).
    and_(object, Immediate(~Page::kPageAlignmentMask));

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions
    bts(Operand(object, Page::kRSetOffset), scratch);
  } else {
    Register dst = smi_index;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric.
      SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
      lea(dst, FieldOperand(object,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(object, dst, scratch);
    } else {
      RecordWriteStub stub(object, dst, scratch);
      CallStub(&stub);
    }
  }

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
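    // scratch is now (object - new_space_start) masked with NewSpaceMask, so
    // it is zero exactly when the object lies inside the new-space
    // reservation; the jump below distinguishes the two cases via 'cc'.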
    j(cc, branch);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
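  // For example, if msg happens to sit at an odd address, p0 below is msg - 1
  // (a word with a valid smi tag, since the tag is the low bit and
  // kSmiTag == 0) and the second value pushed is Smi::FromInt(1), the
  // alignment difference.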
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  movq(rax, Immediate(num_arguments));
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));

  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  movq(rdi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  movq(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    movq(target, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    movq(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmpq(target, Operand(rsp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  lea(target, FieldOperand(target, Code::kHeaderSize));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xor_(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (x == 0) {
    xor_(kScratchRegister, kScratchRegister);
    movq(dst, kScratchRegister);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

static int kSmiShift = kSmiTagSize + kSmiShiftSize;
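// With kSmiTagSize == 1 and kSmiShiftSize == 31 on x64, kSmiShift is 32: a smi
// keeps its 32-bit value in the upper half of the word and zeros in the lower
// half, which is why tagging and untagging below shift by a full 32 bits.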

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  cmpl(Operand(dst, kIntSize), Immediate(src->value()));
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
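  // A positive smi has both the tag bit (bit 0) and the sign bit (bit 63)
  // clear; rotating left by one moves the sign bit into bit 0 and the tag bit
  // into bit 1, so testing the low two bits checks both conditions at once.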
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}



Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  andl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  cmpq(kScratchRegister, Immediate(1));
  return equal;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testq(src, Immediate(0x80000000));
  return zero;
}


void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible.
    if (dst.is(src1)) {
      addq(dst, src2);
    } else {
      movq(dst, src1);
      addq(dst, src2);
    }
    Assert(no_overflow, "Smi addition overflow");
  } else if (dst.is(src1)) {
    addq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    subq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result) {
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}

void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  Move(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    addq(dst, src);
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kIntSize), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
    Label result_ok;
    j(no_overflow, &result_ok);
    subq(dst, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&result_ok);
  } else {
    Move(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
  } else {
    // Subtract by adding the negative, to do it in two operations.
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
    } else {
      Move(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
    Label sub_success;
    j(no_overflow, &sub_success);
    addq(src, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&sub_success);
  } else {
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
      j(overflow, on_not_smi_result);
    } else {
      Move(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    and_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    or_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    xor_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
                                                  Register src,
                                                  int shift_value,
                                                  Label* on_not_smi_result) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  Label result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // Shift amount specified by lower 5 bits, not six as the shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  Label result_ok;
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift is rcx modulo 0x1f + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, then both tag bits are set, i.e. neither operand is a smi.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then scratch register all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}

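// SmiToIndex converts a smi to its untagged value scaled by 2^shift (e.g.
// kPointerSizeLog2 for pointer-sized elements); the returned SmiIndex is
// meant to be used with a times_1 scale factor.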
SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}

SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
  ASSERT_EQ(0, kSmiTag);
  Condition smi = CheckSmi(src);
  j(smi, on_smi);
}


void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi);
}


void MacroAssembler::JumpIfNotPositiveSmi(Register src,
                                          Label* on_not_positive_smi) {
  Condition positive_smi = CheckPositiveSmi(src);
  j(NegateCondition(positive_smi), on_not_positive_smi);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals) {
  SmiCompare(src, constant);
  j(equal, on_equals);
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
                                      Label* on_not_both_smi) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
                                              Label* on_not_both_smi) {
  Condition both_smi = CheckBothPositiveSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


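// The sequential-ASCII checks below mask each instance type down to the
// string/representation/encoding bits and then pack both masked values into
// one register (scratch1 + scratch2 * 8), so a single compare against the
// combined expected tag tests both strings at once.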
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                                         Register second_object,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* on_fail) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    Label* on_fail) {
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Set(kScratchRegister, smi);
    push(kScratchRegister);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}


void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}


void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  WriteRecordedPositions();
  call(code_object, rmode);
}


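// PushTryHandler builds a stack handler frame on top of the already pushed
// return address: handler state, frame pointer (or NULL for JS entry frames)
// and the previous handler, then makes Top::k_handler_address point at it.
// PopTryHandler unlinks it again.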
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  // Unlink this handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  pop(Operand(kScratchRegister, 0));
  // Remove the remaining fields.
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::FCmp() {
  fucomip();
  ffree(0);
  fincstp();
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    JumpIfSmi(obj, fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(not_equal, fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok);
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand not a smi");
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    movl(Operand(kScratchRegister, 0), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      incl(operand);
    } else {
      addl(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      decl(operand);
    } else {
      subl(operand, Immediate(value));
    }
  }
}

#ifdef ENABLE_DEBUGGER_SUPPORT

void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      push(Operand(kScratchRegister, 0));
    }
  }
}


void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(Operand(kScratchRegister, 0), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(reg, Operand(kScratchRegister, 0));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      pop(Operand(kScratchRegister, 0));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT(!scratch.is(kScratchRegister));
  ASSERT(!base.is(kScratchRegister));
  ASSERT(!base.is(scratch));
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      movq(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(Operand(kScratchRegister, 0), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}

void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  xor_(rax, rax);  // no arguments
  movq(rbx, ExternalReference(Runtime::kDebugBreak));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif  // ENABLE_DEBUGGER_SUPPORT


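// InvokePrologue compares the expected and actual argument counts and, when
// they can differ at runtime, routes the call through the arguments adaptor
// trampoline. rax/rbx carry the actual/expected counts and rdx the code to
// invoke, matching the adaptor's calling convention.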
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      movq(rax, Immediate(actual.immediate()));
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        movq(rbx, Immediate(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke);
      ASSERT(expected.reg().is(rbx));
      movq(rax, Immediate(actual.immediate()));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Register dummy = rax;
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  lea(rdx, FieldOperand(rdx, Code::kHeaderSize));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  Move(rdi, Handle<JSFunction>(function));
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (FLAG_debug_code) {
    movq(kScratchRegister,
         Factory::undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


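// EnterExitFrame sets up the frame used when calling out to C code: caller
// rbp, a slot for the saved entry stack pointer and the code object, followed
// by any Windows-specific argument/result space, with rsp aligned to the OS
// activation frame alignment. LeaveExitFrame unwinds it again.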
void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
  // Setup the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  movq(r14, rax);  // Backup rax before we use it.

  movq(rax, rbp);
  store_rax(c_entry_fp_address);
  movq(rax, rsi);
  store_rax(context_address);

  // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r15, Operand(rbp, r14, times_pointer_size, offset));

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

#ifdef _WIN64
  // Reserve space on stack for result and argument structures, if necessary.
  int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
  // Reserve space for the Arguments object. The Windows 64-bit ABI
  // requires us to pass this structure as a pointer to its location on
  // the stack. The structure contains 2 values.
  int argument_stack_space = 2 * kPointerSize;
  // We also need backing space for 4 parameters, even though
  // we only pass one or two parameters, and they are passed in registers.
  int argument_mirror_space = 4 * kPointerSize;
  int total_stack_space =
      argument_mirror_space + argument_stack_space + result_stack_space;
  subq(rsp, Immediate(total_stack_space));
#endif

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}


void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
  // Registers:
  // r15 : argv
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register rbx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(rbx, Operand(rbp, kOffset));
    CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Pop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r15, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  movq(kScratchRegister, context_address);
  movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
  movq(Operand(kScratchRegister, 0), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(rcx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  movq(kScratchRegister, c_entry_fp_address);
  movq(Operand(kScratchRegister, 0), Immediate(0));
}


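// CheckMaps walks the prototype chain from 'object' to 'holder', comparing
// each object's map against the value recorded at compile time and jumping
// to 'miss' on any mismatch. Prototypes in new space are reloaded from the
// map (their address may move), old-space prototypes are embedded directly.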
Register MacroAssembler::CheckMaps(JSObject* object,
                                   Register object_reg,
                                   JSObject* holder,
                                   Register holder_reg,
                                   Register scratch,
                                   int save_at_depth,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg. On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    movq(Operand(rsp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      Cmp(scratch, Handle<Map>(object->map()));
      // Branch on the result of the map check.
      j(not_equal, miss);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      Cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Handle<Map>(object->map()));
      // Branch on the result of the map check.
      j(not_equal, miss);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      Move(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      movq(Operand(rsp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
  j(not_equal, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


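// CheckAccessGlobalProxy performs the security check for global proxy
// accesses: if the current context and the holder's context are not the
// same, their security tokens must match; otherwise control jumps to 'miss'.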
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // Preserve original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


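// The allocation helpers below implement inline bump-pointer allocation in
// new space: load the current allocation top, advance it by the object size,
// compare against the allocation limit (jumping to gc_required on overflow)
// and write the new top back.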
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    movq(kScratchRegister, new_space_allocation_top);
    cmpq(result, Operand(kScratchRegister, 0));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    ASSERT(!scratch.is(result_end));
    movq(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else if (result.is(rax)) {
    load_rax(new_space_allocation_top);
  } else {
    movq(kScratchRegister, new_space_allocation_top);
    movq(result, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top.
  if (result_end.is(rax)) {
    // rax can be stored directly to a memory location.
    store_rax(new_space_allocation_top);
  } else {
    // Register required - use scratch provided if available.
    if (scratch.is_valid()) {
      movq(Operand(scratch, 0), result_end);
    } else {
      movq(kScratchRegister, new_space_allocation_top);
      movq(Operand(kScratchRegister, 0), result_end);
    }
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  Register top_reg = result_end.is_valid() ? result_end : result;

  if (top_reg.is(result)) {
    addq(top_reg, Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(top_reg, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      subq(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subq(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    // Tag the result if requested.
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  }
  addq(result_end, result);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  movq(kScratchRegister, new_space_allocation_top);
#ifdef DEBUG
  cmpq(object, Operand(kScratchRegister, 0));
  Check(below, "Undo allocation of non allocated memory");
#endif
  movq(Operand(kScratchRegister, 0), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


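// The string allocators compute the object size from the character count,
// rounded up to the object alignment, before handing off to
// AllocateInNewSpace and initializing the map, length and hash fields.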
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movl(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movl(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ascii cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // context is the current function context.
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}

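// C-call helpers: on Win64 the caller reserves stack slots for every
// argument (including register arguments), while on Linux the first six
// arguments travel in registers only. PrepareCallCFunction aligns rsp and
// reserves those slots; CallCFunction restores rsp after the call.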
int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows stack slots are reserved by the caller for all arguments
  // including the ones passed in registers. On Linux 6 arguments are passed in
  // registers and the caller does not reserve stack slots for them.
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  static const int kArgumentsWithoutStackSlot = 0;
#else
  static const int kArgumentsWithoutStackSlot = 6;
#endif
  return num_arguments > kArgumentsWithoutStackSlot ?
      num_arguments - kArgumentsWithoutStackSlot : 0;
}

void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);
  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  movq(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64