// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  ASSERT(is_int32(~Page::kPageAlignmentMask));
  and_(object,
       Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask)));
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  subq(addr, page_start);
  shr(addr, Immediate(kPointerSizeLog2));
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
  j(below, &fast);

  // We have a large object containing pointers. It must be a FixedArray.

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Load the array length into 'scratch'.
  movl(scratch,
       Operand(page_start,
               Page::kObjectStartOffset + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray), at
  //   page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  lea(page_start,
      Operand(page_start, array_length, times_pointer_size,
              Page::kObjectStartOffset + FixedArray::kHeaderSize
              - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions
  bind(&fast);
  bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}
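
// Worked example of the computation above (illustrative numbers only, using
// a hypothetical 8 KB page and 8-byte pointers rather than the build-time
// constants): a slot at page_start + 0x120 yields the bit index
// 0x120 >> kPointerSizeLog2 = 36, so the bts above sets bit 36 of the bitmap
// at page_start + Page::kRSetOffset. Only when that index reaches
// Page::kPageSize / kPointerSize (a large FixedArray spilling past the first
// page) is page_start rebased so the same bts lands in the extra remembered
// set stored after the array.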


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the smi_index register contains the array index into
// the elements array represented as a smi. Otherwise it can be used as a
// scratch register.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register smi_index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !smi_index.is(rsi));

  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have space
  // for the remembered set bits).
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, smi_index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}
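
// Illustrative caller sketch (not part of this file; 'receiver', 'value' and
// 'scratch' stand for whatever registers are live at the call site): after
// storing a pointer into a fixed field, generated code records the write so
// the GC sees the potential cross-generation reference:
//   movq(FieldOperand(receiver, JSObject::kPropertiesOffset), value);
//   RecordWrite(receiver, JSObject::kPropertiesOffset, value, scratch);
// With offset == 0 the last argument must instead hold the smi-tagged array
// index, as the comment above describes.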


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register smi_index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);
  }

  // Test that the object address is not in the new space. We cannot
  // set remembered set bits in the new space.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  // We use optimized write barrier code if the word being written to is not in
  // a large object page, or is in the first "page" of a large object page.
  // We make sure that an offset is inside the right limits whether it is
  // tagged or untagged.
  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize - kHeapObjectTag)) {
    // Compute the bit offset in the remembered set, leave it in 'scratch'.
    lea(scratch, Operand(object, offset));
    ASSERT(is_int32(Page::kPageAlignmentMask));
    and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
    shr(scratch, Immediate(kPointerSizeLog2));

    // Compute the page address from the heap object pointer, leave it in
    // 'object' (immediate value is sign extended).
    and_(object, Immediate(~Page::kPageAlignmentMask));

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions
    bts(Operand(object, Page::kRSetOffset), scratch);
  } else {
    Register dst = smi_index;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric.
      SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
      lea(dst, FieldOperand(object,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(object, dst, scratch);
    } else {
      RecordWriteStub stub(object, dst, scratch);
      CallStub(&stub);
    }
  }

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}
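
// Sketch of the non-serializer path above (sizes are examples, not the real
// constants): if new space occupies [start, start + size) with size a power
// of two and start aligned to size, then for any pointer p
//   ((p - start) & ~(size - 1)) == 0  exactly when p is in new space.
// Heap::NewSpaceMask() plays the role of ~(size - 1), which is why the code
// adds -new_space_start and masks, leaving zero only for new-space objects.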


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
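
// Illustrative example of the smi trick above (addresses invented): if msg
// is at 0x7f3a55b1, then p0 = 0x7f3a55b0 has a valid smi tag (low bit clear)
// and the difference p1 - p0 = 1 is pushed as Smi::FromInt(1); the abort
// runtime can then rebuild the char* as p0 plus the untagged delta, so no
// untagged pointer is ever left on the GC-scanned stack.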


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  movq(rax, Immediate(num_arguments));
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));

  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  movq(rdi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  movq(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    movq(target, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    movq(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmpq(target, Operand(rsp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  lea(target, FieldOperand(target, Code::kHeaderSize));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xor_(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (x == 0) {
    xor_(kScratchRegister, kScratchRegister);
    movq(dst, kScratchRegister);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

static int kSmiShift = kSmiTagSize + kSmiShiftSize;
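
// On this port a smi keeps its 32-bit payload in the upper half of the
// word: kSmiShift is kSmiTagSize + kSmiShiftSize (32 here), so for example
// Smi::FromInt(5) is the bit pattern 0x0000000500000000 and the low 32 bits,
// including the tag bit, are all zero. The tagging and untagging helpers
// below are therefore plain shifts by kSmiShift.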
Steve Blocka7e24c12009-10-30 11:49:00 +0000526
527void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000528 ASSERT_EQ(0, kSmiTag);
Steve Block3ce2e202009-11-05 08:53:23 +0000529 if (!dst.is(src)) {
530 movl(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000531 }
Steve Block3ce2e202009-11-05 08:53:23 +0000532 shl(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +0000533}
534
535
536void MacroAssembler::Integer32ToSmi(Register dst,
537 Register src,
538 Label* on_overflow) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000539 ASSERT_EQ(0, kSmiTag);
Steve Block3ce2e202009-11-05 08:53:23 +0000540 // 32-bit integer always fits in a long smi.
Steve Blocka7e24c12009-10-30 11:49:00 +0000541 if (!dst.is(src)) {
542 movl(dst, src);
543 }
Steve Block3ce2e202009-11-05 08:53:23 +0000544 shl(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +0000545}
546
547
Steve Block3ce2e202009-11-05 08:53:23 +0000548void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
549 Register src,
550 int constant) {
551 if (dst.is(src)) {
552 addq(dst, Immediate(constant));
553 } else {
554 lea(dst, Operand(src, constant));
555 }
556 shl(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +0000557}
558
559
560void MacroAssembler::SmiToInteger32(Register dst, Register src) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000561 ASSERT_EQ(0, kSmiTag);
562 if (!dst.is(src)) {
Steve Block3ce2e202009-11-05 08:53:23 +0000563 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000564 }
Steve Block3ce2e202009-11-05 08:53:23 +0000565 shr(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +0000566}
567
568
569void MacroAssembler::SmiToInteger64(Register dst, Register src) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000570 ASSERT_EQ(0, kSmiTag);
Steve Block3ce2e202009-11-05 08:53:23 +0000571 if (!dst.is(src)) {
572 movq(dst, src);
573 }
574 sar(dst, Immediate(kSmiShift));
575}
576
577
578void MacroAssembler::SmiTest(Register src) {
579 testq(src, src);
580}
581
582
583void MacroAssembler::SmiCompare(Register dst, Register src) {
584 cmpq(dst, src);
585}
586
587
588void MacroAssembler::SmiCompare(Register dst, Smi* src) {
589 ASSERT(!dst.is(kScratchRegister));
590 if (src->value() == 0) {
591 testq(dst, dst);
592 } else {
593 Move(kScratchRegister, src);
594 cmpq(dst, kScratchRegister);
595 }
596}
597
598
Steve Block6ded16b2010-05-10 14:33:55 +0100599void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
600 cmpq(dst, src);
601}
602
603
Steve Block3ce2e202009-11-05 08:53:23 +0000604void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
605 cmpq(dst, src);
606}
607
608
609void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
610 if (src->value() == 0) {
    // Only the tagged smi zero fits in a 32-bit immediate.
    cmpq(dst, Immediate(0));
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}
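
// How the rotate trick above works (worked example): a positive smi has both
// the sign bit (bit 63) and the tag bit (bit 0) clear. rol by 1 moves the
// sign bit into bit 0 and the tag bit into bit 1, so a single
// testl(..., Immediate(0x03)) checks both at once. E.g. Smi::FromInt(-1),
// 0xffffffff00000000, rotates to 0xfffffffe00000001 and fails the test.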


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}



Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  andl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  cmpq(kScratchRegister, Immediate(1));
  return equal;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testq(src, Immediate(0x80000000));
  return zero;
}


void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}
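
// Why the comparison above detects failure: neg leaves a 64-bit value
// unchanged exactly for 0 and for the most negative value, and those are the
// two inputs (0 and Smi::kMinValue) whose negation is not a smi. So
// "result == original" is the bailout test: negating Smi::FromInt(0) gives
// the same bit pattern and falls through instead of jumping to
// on_smi_result, leaving the caller to produce the heap number -0.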


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible.
    if (dst.is(src1)) {
      addq(dst, src2);
    } else {
      movq(dst, src1);
      addq(dst, src2);
    }
    Assert(no_overflow, "Smi addition overflow");
  } else if (dst.is(src1)) {
    addq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    subq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Operand const& src2,
                            Label* on_not_smi_result) {
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}

void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  Move(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    addq(dst, src);
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
    Label result_ok;
    j(no_overflow, &result_ok);
    subq(dst, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&result_ok);
  } else {
    Move(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
  } else {
    // Subtract by adding the negative, to do it in two operations.
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
    } else {
      Move(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
    Label sub_success;
    j(no_overflow, &sub_success);
    addq(src, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&sub_success);
  } else {
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
      j(overflow, on_not_smi_result);
    } else {
      Move(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}
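
// Illustrative edge cases for the checks above: testl(rax, 0x7fffffff) is
// zero only for the untagged values 0 and Smi::kMinValue. If the divisor is
// then negative the code bails out, which covers both kMinValue / -1 (idivl
// would overflow and fault) and 0 / -5 (the result has to be the heap
// number -0). A non-zero remainder also bails, since the quotient would not
// be an integer.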


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}
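
// Illustrative example for the final check above: -8 % 2 is 0 in integer
// arithmetic, but JavaScript requires -0 when the dividend is negative, so a
// zero remainder combined with a negative src1 goes to on_not_smi_result
// instead of being tagged as the smi 0.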


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    and_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    or_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    xor_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
                                                  Register src,
                                                  int shift_value,
                                                  Label* on_not_smi_result) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value,
                                          Label* on_not_smi_result) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smi_result) {
  ASSERT(!dst.is(rcx));
  Label result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // Shift amount specified by lower 5 bits, not six as the shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  Label result_ok;
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift is rcx modulo 0x1f + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, then both operands are heap objects (neither is a smi).
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then the scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
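
// Worked example of the mask trick above: with src1 a smi and src2 a heap
// object, kScratchRegister becomes (src1 & kSmiTagMask) - 1 == ~0, so
// dst = ((src1 ^ src2) & ~0) ^ src1 == src2; with the roles swapped the
// scratch value is 0 and dst = 0 ^ src1 == src1. Either way dst ends up
// holding whichever operand is not a smi.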
1357
Steve Block3ce2e202009-11-05 08:53:23 +00001358SmiIndex MacroAssembler::SmiToIndex(Register dst,
1359 Register src,
1360 int shift) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001361 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001362 // There is a possible optimization if shift is in the range 60-63, but that
1363 // will (and must) never happen.
1364 if (!dst.is(src)) {
1365 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001366 }
Steve Block3ce2e202009-11-05 08:53:23 +00001367 if (shift < kSmiShift) {
1368 sar(dst, Immediate(kSmiShift - shift));
1369 } else {
1370 shl(dst, Immediate(shift - kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001371 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001372 return SmiIndex(dst, times_1);
1373}
1374
Steve Blocka7e24c12009-10-30 11:49:00 +00001375SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1376 Register src,
1377 int shift) {
1378 // Register src holds a positive smi.
1379 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001380 if (!dst.is(src)) {
1381 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001382 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001383 neg(dst);
Steve Block3ce2e202009-11-05 08:53:23 +00001384 if (shift < kSmiShift) {
1385 sar(dst, Immediate(kSmiShift - shift));
1386 } else {
1387 shl(dst, Immediate(shift - kSmiShift));
1388 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001389 return SmiIndex(dst, times_1);
1390}
1391
1392
Steve Block3ce2e202009-11-05 08:53:23 +00001393void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
1394 ASSERT_EQ(0, kSmiTag);
1395 Condition smi = CheckSmi(src);
1396 j(smi, on_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00001397}
1398
Steve Block3ce2e202009-11-05 08:53:23 +00001399
1400void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
1401 Condition smi = CheckSmi(src);
1402 j(NegateCondition(smi), on_not_smi);
1403}
1404
1405
1406void MacroAssembler::JumpIfNotPositiveSmi(Register src,
1407 Label* on_not_positive_smi) {
1408 Condition positive_smi = CheckPositiveSmi(src);
1409 j(NegateCondition(positive_smi), on_not_positive_smi);
1410}
1411
1412
1413void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1414 Smi* constant,
1415 Label* on_equals) {
1416 SmiCompare(src, constant);
1417 j(equal, on_equals);
1418}
1419
1420
1421void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
1422 Condition is_valid = CheckInteger32ValidSmiValue(src);
1423 j(NegateCondition(is_valid), on_invalid);
1424}
1425
1426
1427void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1428 Label* on_invalid) {
1429 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1430 j(NegateCondition(is_valid), on_invalid);
1431}
1432
1433
1434void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
1435 Label* on_not_both_smi) {
1436 Condition both_smi = CheckBothSmi(src1, src2);
1437 j(NegateCondition(both_smi), on_not_both_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00001438}
1439
1440
Leon Clarked91b9f72010-01-27 17:25:45 +00001441void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
1442 Label* on_not_both_smi) {
1443 Condition both_smi = CheckBothPositiveSmi(src1, src2);
1444 j(NegateCondition(both_smi), on_not_both_smi);
1445}
1446
1447
Leon Clarkee46be812010-01-19 14:06:41 +00001449void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
1450 Register second_object,
1451 Register scratch1,
1452 Register scratch2,
1453 Label* on_fail) {
1454 // Check that both objects are not smis.
1455 Condition either_smi = CheckEitherSmi(first_object, second_object);
1456 j(either_smi, on_fail);
1457
1458 // Load instance type for both strings.
1459 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
1460 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
1461 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
1462 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
1463
1464 // Check that both are flat ascii strings.
1465 ASSERT(kNotStringTag != 0);
1466 const int kFlatAsciiStringMask =
1467 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
Leon Clarked91b9f72010-01-27 17:25:45 +00001468 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
Leon Clarkee46be812010-01-19 14:06:41 +00001469
1470 andl(scratch1, Immediate(kFlatAsciiStringMask));
1471 andl(scratch2, Immediate(kFlatAsciiStringMask));
1472 // Interleave the bits to check both scratch1 and scratch2 in one test.
1473 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1474 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1475 cmpl(scratch1,
Leon Clarked91b9f72010-01-27 17:25:45 +00001476 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
Leon Clarkee46be812010-01-19 14:06:41 +00001477 j(not_equal, on_fail);
1478}
1479
1480
Steve Block6ded16b2010-05-10 14:33:55 +01001481void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1482 Register instance_type,
1483 Register scratch,
                                              1484     Label* failure) {
1485 if (!scratch.is(instance_type)) {
1486 movl(scratch, instance_type);
1487 }
1488
1489 const int kFlatAsciiStringMask =
1490 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1491
1492 andl(scratch, Immediate(kFlatAsciiStringMask));
1493 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1494 j(not_equal, failure);
1495}
1496
1497
1498void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1499 Register first_object_instance_type,
1500 Register second_object_instance_type,
1501 Register scratch1,
1502 Register scratch2,
1503 Label* on_fail) {
1504 // Load instance type for both strings.
1505 movq(scratch1, first_object_instance_type);
1506 movq(scratch2, second_object_instance_type);
1507
1508 // Check that both are flat ascii strings.
1509 ASSERT(kNotStringTag != 0);
1510 const int kFlatAsciiStringMask =
1511 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1512 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1513
1514 andl(scratch1, Immediate(kFlatAsciiStringMask));
1515 andl(scratch2, Immediate(kFlatAsciiStringMask));
1516 // Interleave the bits to check both scratch1 and scratch2 in one test.
1517 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1518 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1519 cmpl(scratch1,
1520 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1521 j(not_equal, on_fail);
1522}
1523
1524
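// Loads a handle into a register: smis are loaded as immediate values, while
// heap objects are recorded as embedded objects in the relocation information
// so the GC can update the pointer.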
Steve Blocka7e24c12009-10-30 11:49:00 +00001525void MacroAssembler::Move(Register dst, Handle<Object> source) {
1526 ASSERT(!source->IsFailure());
1527 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001528 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001529 } else {
1530 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1531 }
1532}
1533
1534
1535void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001536 ASSERT(!source->IsFailure());
Steve Blocka7e24c12009-10-30 11:49:00 +00001537 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001538 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001539 } else {
1540 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1541 movq(dst, kScratchRegister);
1542 }
1543}
1544
1545
1546void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001547 if (source->IsSmi()) {
1548 SmiCompare(dst, Smi::cast(*source));
1549 } else {
1550 Move(kScratchRegister, source);
1551 cmpq(dst, kScratchRegister);
1552 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001553}
1554
1555
1556void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1557 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001558 SmiCompare(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001559 } else {
1560 ASSERT(source->IsHeapObject());
1561 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1562 cmpq(dst, kScratchRegister);
1563 }
1564}
1565
1566
1567void MacroAssembler::Push(Handle<Object> source) {
1568 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001569 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001570 } else {
1571 ASSERT(source->IsHeapObject());
1572 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1573 push(kScratchRegister);
1574 }
1575}
1576
1577
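// Pushes a smi constant, using a 32-bit immediate push when the tagged value
// fits in an int32 and materializing it in kScratchRegister otherwise.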
1578void MacroAssembler::Push(Smi* source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001579 intptr_t smi = reinterpret_cast<intptr_t>(source);
1580 if (is_int32(smi)) {
1581 push(Immediate(static_cast<int32_t>(smi)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001582 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001583 Set(kScratchRegister, smi);
1584 push(kScratchRegister);
1585 }
1586}
1587
1588
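// Drops stack_elements pointer-sized values from the top of the stack without
// reading them.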
Leon Clarkee46be812010-01-19 14:06:41 +00001589void MacroAssembler::Drop(int stack_elements) {
1590 if (stack_elements > 0) {
1591 addq(rsp, Immediate(stack_elements * kPointerSize));
1592 }
1593}
1594
1595
Steve Block3ce2e202009-11-05 08:53:23 +00001596void MacroAssembler::Test(const Operand& src, Smi* source) {
1597 intptr_t smi = reinterpret_cast<intptr_t>(source);
1598 if (is_int32(smi)) {
1599 testl(src, Immediate(static_cast<int32_t>(smi)));
1600 } else {
1601 Move(kScratchRegister, source);
1602 testq(src, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00001603 }
1604}
1605
1606
1607void MacroAssembler::Jump(ExternalReference ext) {
1608 movq(kScratchRegister, ext);
1609 jmp(kScratchRegister);
1610}
1611
1612
1613void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1614 movq(kScratchRegister, destination, rmode);
1615 jmp(kScratchRegister);
1616}
1617
1618
1619void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
Steve Block3ce2e202009-11-05 08:53:23 +00001620 // TODO(X64): Inline this
1621 jmp(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001622}
1623
1624
1625void MacroAssembler::Call(ExternalReference ext) {
1626 movq(kScratchRegister, ext);
1627 call(kScratchRegister);
1628}
1629
1630
1631void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1632 movq(kScratchRegister, destination, rmode);
1633 call(kScratchRegister);
1634}
1635
1636
1637void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1638 ASSERT(RelocInfo::IsCodeTarget(rmode));
1639 WriteRecordedPositions();
Steve Block3ce2e202009-11-05 08:53:23 +00001640 call(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001641}
1642
1643
1644void MacroAssembler::PushTryHandler(CodeLocation try_location,
1645 HandlerType type) {
1646 // Adjust this code if not the case.
1647 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1648
1649 // The pc (return address) is already on TOS. This code pushes state,
1650 // frame pointer and current handler. Check that they are expected
1651 // next on the stack, in that order.
1652 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1653 StackHandlerConstants::kPCOffset - kPointerSize);
1654 ASSERT_EQ(StackHandlerConstants::kFPOffset,
1655 StackHandlerConstants::kStateOffset - kPointerSize);
1656 ASSERT_EQ(StackHandlerConstants::kNextOffset,
1657 StackHandlerConstants::kFPOffset - kPointerSize);
1658
1659 if (try_location == IN_JAVASCRIPT) {
1660 if (type == TRY_CATCH_HANDLER) {
1661 push(Immediate(StackHandler::TRY_CATCH));
1662 } else {
1663 push(Immediate(StackHandler::TRY_FINALLY));
1664 }
1665 push(rbp);
1666 } else {
1667 ASSERT(try_location == IN_JS_ENTRY);
1668 // The frame pointer does not point to a JS frame so we save NULL
1669 // for rbp. We expect the code throwing an exception to check rbp
1670 // before dereferencing it to restore the context.
1671 push(Immediate(StackHandler::ENTRY));
1672 push(Immediate(0)); // NULL frame pointer.
1673 }
1674 // Save the current handler.
1675 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1676 push(Operand(kScratchRegister, 0));
1677 // Link this handler.
1678 movq(Operand(kScratchRegister, 0), rsp);
1679}
1680
1681
Leon Clarkee46be812010-01-19 14:06:41 +00001682void MacroAssembler::PopTryHandler() {
1683 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1684 // Unlink this handler.
1685 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1686 pop(Operand(kScratchRegister, 0));
1687 // Remove the remaining fields.
1688 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1689}
1690
1691
Steve Blocka7e24c12009-10-30 11:49:00 +00001692void MacroAssembler::Ret() {
1693 ret(0);
1694}
1695
1696
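// Compares the two values on top of the FPU register stack, sets EFLAGS, and
// removes both operands from the stack.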
1697void MacroAssembler::FCmp() {
Steve Block3ce2e202009-11-05 08:53:23 +00001698 fucomip();
1699 ffree(0);
1700 fincstp();
Steve Blocka7e24c12009-10-30 11:49:00 +00001701}
1702
1703
1704void MacroAssembler::CmpObjectType(Register heap_object,
1705 InstanceType type,
1706 Register map) {
1707 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1708 CmpInstanceType(map, type);
1709}
1710
1711
1712void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1713 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1714 Immediate(static_cast<int8_t>(type)));
1715}
1716
1717
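// Compares the map of obj against the given map and jumps to fail on a
// mismatch. When is_heap_object is false, obj is first checked for being a smi.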
Andrei Popescu31002712010-02-23 13:46:05 +00001718void MacroAssembler::CheckMap(Register obj,
1719 Handle<Map> map,
1720 Label* fail,
1721 bool is_heap_object) {
1722 if (!is_heap_object) {
1723 JumpIfSmi(obj, fail);
1724 }
1725 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1726 j(not_equal, fail);
1727}
1728
1729
Andrei Popescu402d9372010-02-26 13:31:12 +00001730void MacroAssembler::AbortIfNotNumber(Register object, const char* msg) {
1731 Label ok;
1732 Condition is_smi = CheckSmi(object);
1733 j(is_smi, &ok);
1734 Cmp(FieldOperand(object, HeapObject::kMapOffset),
1735 Factory::heap_number_map());
1736 Assert(equal, msg);
1737 bind(&ok);
1738}
1739
1740
Steve Block6ded16b2010-05-10 14:33:55 +01001741void MacroAssembler::AbortIfNotSmi(Register object, const char* msg) {
                                              1742 Condition is_smi = CheckSmi(object);
                                              1743 Assert(is_smi, msg);
                                              1744}
1748
1749
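// Loads the map and instance type of heap_object and returns the condition
// that holds when the object is a string.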
Leon Clarked91b9f72010-01-27 17:25:45 +00001750Condition MacroAssembler::IsObjectStringType(Register heap_object,
1751 Register map,
1752 Register instance_type) {
1753 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00001754 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00001755 ASSERT(kNotStringTag != 0);
1756 testb(instance_type, Immediate(kIsNotStringMask));
1757 return zero;
1758}
1759
1760
Steve Blocka7e24c12009-10-30 11:49:00 +00001761void MacroAssembler::TryGetFunctionPrototype(Register function,
1762 Register result,
1763 Label* miss) {
                                              1764 // Check that the function isn't a smi.
1765 testl(function, Immediate(kSmiTagMask));
1766 j(zero, miss);
1767
1768 // Check that the function really is a function.
1769 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1770 j(not_equal, miss);
1771
1772 // Make sure that the function has an instance prototype.
1773 Label non_instance;
1774 testb(FieldOperand(result, Map::kBitFieldOffset),
1775 Immediate(1 << Map::kHasNonInstancePrototype));
1776 j(not_zero, &non_instance);
1777
1778 // Get the prototype or initial map from the function.
1779 movq(result,
1780 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1781
1782 // If the prototype or initial map is the hole, don't return it and
1783 // simply miss the cache instead. This will allow us to allocate a
1784 // prototype object on-demand in the runtime system.
1785 CompareRoot(result, Heap::kTheHoleValueRootIndex);
1786 j(equal, miss);
1787
1788 // If the function does not have an initial map, we're done.
1789 Label done;
1790 CmpObjectType(result, MAP_TYPE, kScratchRegister);
1791 j(not_equal, &done);
1792
1793 // Get the prototype from the initial map.
1794 movq(result, FieldOperand(result, Map::kPrototypeOffset));
1795 jmp(&done);
1796
1797 // Non-instance prototype: Fetch prototype from constructor field
1798 // in initial map.
1799 bind(&non_instance);
1800 movq(result, FieldOperand(result, Map::kConstructorOffset));
1801
1802 // All done.
1803 bind(&done);
1804}
1805
1806
1807void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1808 if (FLAG_native_code_counters && counter->Enabled()) {
1809 movq(kScratchRegister, ExternalReference(counter));
1810 movl(Operand(kScratchRegister, 0), Immediate(value));
1811 }
1812}
1813
1814
1815void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1816 ASSERT(value > 0);
1817 if (FLAG_native_code_counters && counter->Enabled()) {
1818 movq(kScratchRegister, ExternalReference(counter));
1819 Operand operand(kScratchRegister, 0);
1820 if (value == 1) {
1821 incl(operand);
1822 } else {
1823 addl(operand, Immediate(value));
1824 }
1825 }
1826}
1827
1828
1829void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1830 ASSERT(value > 0);
1831 if (FLAG_native_code_counters && counter->Enabled()) {
1832 movq(kScratchRegister, ExternalReference(counter));
1833 Operand operand(kScratchRegister, 0);
1834 if (value == 1) {
1835 decl(operand);
1836 } else {
1837 subl(operand, Immediate(value));
1838 }
1839 }
1840}
1841
Steve Blocka7e24c12009-10-30 11:49:00 +00001842#ifdef ENABLE_DEBUGGER_SUPPORT
1843
1844void MacroAssembler::PushRegistersFromMemory(RegList regs) {
1845 ASSERT((regs & ~kJSCallerSaved) == 0);
1846 // Push the content of the memory location to the stack.
1847 for (int i = 0; i < kNumJSCallerSaved; i++) {
1848 int r = JSCallerSavedCode(i);
1849 if ((regs & (1 << r)) != 0) {
1850 ExternalReference reg_addr =
1851 ExternalReference(Debug_Address::Register(i));
1852 movq(kScratchRegister, reg_addr);
1853 push(Operand(kScratchRegister, 0));
1854 }
1855 }
1856}
1857
Steve Block3ce2e202009-11-05 08:53:23 +00001858
Steve Blocka7e24c12009-10-30 11:49:00 +00001859void MacroAssembler::SaveRegistersToMemory(RegList regs) {
1860 ASSERT((regs & ~kJSCallerSaved) == 0);
1861 // Copy the content of registers to memory location.
1862 for (int i = 0; i < kNumJSCallerSaved; i++) {
1863 int r = JSCallerSavedCode(i);
1864 if ((regs & (1 << r)) != 0) {
1865 Register reg = { r };
1866 ExternalReference reg_addr =
1867 ExternalReference(Debug_Address::Register(i));
1868 movq(kScratchRegister, reg_addr);
1869 movq(Operand(kScratchRegister, 0), reg);
1870 }
1871 }
1872}
1873
1874
1875void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
1876 ASSERT((regs & ~kJSCallerSaved) == 0);
1877 // Copy the content of memory location to registers.
1878 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1879 int r = JSCallerSavedCode(i);
1880 if ((regs & (1 << r)) != 0) {
1881 Register reg = { r };
1882 ExternalReference reg_addr =
1883 ExternalReference(Debug_Address::Register(i));
1884 movq(kScratchRegister, reg_addr);
1885 movq(reg, Operand(kScratchRegister, 0));
1886 }
1887 }
1888}
1889
1890
1891void MacroAssembler::PopRegistersToMemory(RegList regs) {
1892 ASSERT((regs & ~kJSCallerSaved) == 0);
1893 // Pop the content from the stack to the memory location.
1894 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1895 int r = JSCallerSavedCode(i);
1896 if ((regs & (1 << r)) != 0) {
1897 ExternalReference reg_addr =
1898 ExternalReference(Debug_Address::Register(i));
1899 movq(kScratchRegister, reg_addr);
1900 pop(Operand(kScratchRegister, 0));
1901 }
1902 }
1903}
1904
1905
1906void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
1907 Register scratch,
1908 RegList regs) {
1909 ASSERT(!scratch.is(kScratchRegister));
1910 ASSERT(!base.is(kScratchRegister));
1911 ASSERT(!base.is(scratch));
1912 ASSERT((regs & ~kJSCallerSaved) == 0);
1913 // Copy the content of the stack to the memory location and adjust base.
1914 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1915 int r = JSCallerSavedCode(i);
1916 if ((regs & (1 << r)) != 0) {
1917 movq(scratch, Operand(base, 0));
1918 ExternalReference reg_addr =
1919 ExternalReference(Debug_Address::Register(i));
1920 movq(kScratchRegister, reg_addr);
1921 movq(Operand(kScratchRegister, 0), scratch);
1922 lea(base, Operand(base, kPointerSize));
1923 }
1924 }
1925}
1926
Andrei Popescu402d9372010-02-26 13:31:12 +00001927void MacroAssembler::DebugBreak() {
1928 ASSERT(allow_stub_calls());
1929 xor_(rax, rax); // no arguments
1930 movq(rbx, ExternalReference(Runtime::kDebugBreak));
1931 CEntryStub ces(1);
1932 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Steve Blocka7e24c12009-10-30 11:49:00 +00001933}
Andrei Popescu402d9372010-02-26 13:31:12 +00001934#endif // ENABLE_DEBUGGER_SUPPORT
Steve Blocka7e24c12009-10-30 11:49:00 +00001935
1936
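// Shared prologue for the InvokeCode/InvokeFunction variants: compares the
// expected and actual argument counts and, on a mismatch, calls or jumps to
// the arguments adaptor trampoline.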
1937void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1938 const ParameterCount& actual,
1939 Handle<Code> code_constant,
1940 Register code_register,
1941 Label* done,
1942 InvokeFlag flag) {
1943 bool definitely_matches = false;
1944 Label invoke;
1945 if (expected.is_immediate()) {
1946 ASSERT(actual.is_immediate());
1947 if (expected.immediate() == actual.immediate()) {
1948 definitely_matches = true;
1949 } else {
1950 movq(rax, Immediate(actual.immediate()));
1951 if (expected.immediate() ==
Steve Block3ce2e202009-11-05 08:53:23 +00001952 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001953 // Don't worry about adapting arguments for built-ins that
                                              1954 // don't want that done. Skip adaptation code by making it look
1955 // like we have a match between expected and actual number of
1956 // arguments.
1957 definitely_matches = true;
1958 } else {
1959 movq(rbx, Immediate(expected.immediate()));
1960 }
1961 }
1962 } else {
1963 if (actual.is_immediate()) {
1964 // Expected is in register, actual is immediate. This is the
1965 // case when we invoke function values without going through the
1966 // IC mechanism.
1967 cmpq(expected.reg(), Immediate(actual.immediate()));
1968 j(equal, &invoke);
1969 ASSERT(expected.reg().is(rbx));
1970 movq(rax, Immediate(actual.immediate()));
1971 } else if (!expected.reg().is(actual.reg())) {
1972 // Both expected and actual are in (different) registers. This
1973 // is the case when we invoke functions using call and apply.
1974 cmpq(expected.reg(), actual.reg());
1975 j(equal, &invoke);
1976 ASSERT(actual.reg().is(rax));
1977 ASSERT(expected.reg().is(rbx));
1978 }
1979 }
1980
1981 if (!definitely_matches) {
1982 Handle<Code> adaptor =
1983 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
1984 if (!code_constant.is_null()) {
1985 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
1986 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1987 } else if (!code_register.is(rdx)) {
1988 movq(rdx, code_register);
1989 }
1990
1991 if (flag == CALL_FUNCTION) {
1992 Call(adaptor, RelocInfo::CODE_TARGET);
1993 jmp(done);
1994 } else {
1995 Jump(adaptor, RelocInfo::CODE_TARGET);
1996 }
1997 bind(&invoke);
1998 }
1999}
2000
2001
2002void MacroAssembler::InvokeCode(Register code,
2003 const ParameterCount& expected,
2004 const ParameterCount& actual,
2005 InvokeFlag flag) {
2006 Label done;
2007 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
2008 if (flag == CALL_FUNCTION) {
2009 call(code);
2010 } else {
2011 ASSERT(flag == JUMP_FUNCTION);
2012 jmp(code);
2013 }
2014 bind(&done);
2015}
2016
2017
2018void MacroAssembler::InvokeCode(Handle<Code> code,
2019 const ParameterCount& expected,
2020 const ParameterCount& actual,
2021 RelocInfo::Mode rmode,
2022 InvokeFlag flag) {
2023 Label done;
2024 Register dummy = rax;
2025 InvokePrologue(expected, actual, code, dummy, &done, flag);
2026 if (flag == CALL_FUNCTION) {
2027 Call(code, rmode);
2028 } else {
2029 ASSERT(flag == JUMP_FUNCTION);
2030 Jump(code, rmode);
2031 }
2032 bind(&done);
2033}
2034
2035
2036void MacroAssembler::InvokeFunction(Register function,
2037 const ParameterCount& actual,
2038 InvokeFlag flag) {
2039 ASSERT(function.is(rdi));
2040 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2041 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
2042 movsxlq(rbx,
2043 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
2044 movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
2045 // Advances rdx to the end of the Code object header, to the start of
2046 // the executable code.
2047 lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
2048
2049 ParameterCount expected(rbx);
2050 InvokeCode(rdx, expected, actual, flag);
2051}
2052
2053
Andrei Popescu402d9372010-02-26 13:31:12 +00002054void MacroAssembler::InvokeFunction(JSFunction* function,
2055 const ParameterCount& actual,
2056 InvokeFlag flag) {
2057 ASSERT(function->is_compiled());
2058 // Get the function and setup the context.
2059 Move(rdi, Handle<JSFunction>(function));
2060 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2061
2062 // Invoke the cached code.
2063 Handle<Code> code(function->code());
2064 ParameterCount expected(function->shared()->formal_parameter_count());
2065 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
2066}
2067
2068
Steve Blocka7e24c12009-10-30 11:49:00 +00002069void MacroAssembler::EnterFrame(StackFrame::Type type) {
2070 push(rbp);
2071 movq(rbp, rsp);
2072 push(rsi); // Context.
Steve Block3ce2e202009-11-05 08:53:23 +00002073 Push(Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002074 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2075 push(kScratchRegister);
2076 if (FLAG_debug_code) {
2077 movq(kScratchRegister,
2078 Factory::undefined_value(),
2079 RelocInfo::EMBEDDED_OBJECT);
2080 cmpq(Operand(rsp, 0), kScratchRegister);
2081 Check(not_equal, "code object not properly patched");
2082 }
2083}
2084
2085
2086void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2087 if (FLAG_debug_code) {
Steve Block3ce2e202009-11-05 08:53:23 +00002088 Move(kScratchRegister, Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002089 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2090 Check(equal, "stack frame types must match");
2091 }
2092 movq(rsp, rbp);
2093 pop(rbp);
2094}
2095
2096
Steve Blockd0582a62009-12-15 09:54:21 +00002097void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002098 // Setup the frame structure on the stack.
2099 // All constants are relative to the frame pointer of the exit frame.
2100 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2101 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2102 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2103 push(rbp);
2104 movq(rbp, rsp);
2105
2106 // Reserve room for entry stack pointer and push the debug marker.
Steve Block3ce2e202009-11-05 08:53:23 +00002107 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +00002108 push(Immediate(0)); // Saved entry sp, patched before call.
2109 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
                                              2110 push(kScratchRegister); // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +00002111
2112 // Save the frame pointer and the context in top.
2113 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2114 ExternalReference context_address(Top::k_context_address);
2115 movq(r14, rax); // Backup rax before we use it.
2116
2117 movq(rax, rbp);
2118 store_rax(c_entry_fp_address);
2119 movq(rax, rsi);
2120 store_rax(context_address);
2121
2122 // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
2123 // so it must be retained across the C-call.
2124 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
2125 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
2126
2127#ifdef ENABLE_DEBUGGER_SUPPORT
2128 // Save the state of all registers to the stack from the memory
2129 // location. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00002130 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002131 // TODO(1243899): This should be symmetric to
                                              2132 // CopyRegistersFromStackToMemory() but it isn't! rsp is assumed
2133 // correct here, but computed for the other call. Very error
2134 // prone! FIX THIS. Actually there are deeper problems with
2135 // register saving than this asymmetry (see the bug report
2136 // associated with this issue).
2137 PushRegistersFromMemory(kJSCallerSaved);
2138 }
2139#endif
2140
2141#ifdef _WIN64
2142 // Reserve space on stack for result and argument structures, if necessary.
2143 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
2144 // Reserve space for the Arguments object. The Windows 64-bit ABI
2145 // requires us to pass this structure as a pointer to its location on
2146 // the stack. The structure contains 2 values.
2147 int argument_stack_space = 2 * kPointerSize;
2148 // We also need backing space for 4 parameters, even though
                                              2149 // we only pass one or two parameters, and they are passed in registers.
2150 int argument_mirror_space = 4 * kPointerSize;
2151 int total_stack_space =
2152 argument_mirror_space + argument_stack_space + result_stack_space;
2153 subq(rsp, Immediate(total_stack_space));
2154#endif
2155
2156 // Get the required frame alignment for the OS.
2157 static const int kFrameAlignment = OS::ActivationFrameAlignment();
2158 if (kFrameAlignment > 0) {
2159 ASSERT(IsPowerOf2(kFrameAlignment));
2160 movq(kScratchRegister, Immediate(-kFrameAlignment));
2161 and_(rsp, kScratchRegister);
2162 }
2163
2164 // Patch the saved entry sp.
2165 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2166}
2167
2168
Steve Blockd0582a62009-12-15 09:54:21 +00002169void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002170 // Registers:
2171 // r15 : argv
2172#ifdef ENABLE_DEBUGGER_SUPPORT
2173 // Restore the memory copy of the registers by digging them out from
2174 // the stack. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00002175 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002176 // It's okay to clobber register rbx below because we don't need
2177 // the function pointer after this.
2178 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
Steve Blockd0582a62009-12-15 09:54:21 +00002179 int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002180 lea(rbx, Operand(rbp, kOffset));
2181 CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
2182 }
2183#endif
2184
2185 // Get the return address from the stack and restore the frame pointer.
2186 movq(rcx, Operand(rbp, 1 * kPointerSize));
2187 movq(rbp, Operand(rbp, 0 * kPointerSize));
2188
Steve Blocka7e24c12009-10-30 11:49:00 +00002189 // Pop everything up to and including the arguments and the receiver
2190 // from the caller stack.
2191 lea(rsp, Operand(r15, 1 * kPointerSize));
2192
2193 // Restore current context from top and clear it in debug mode.
2194 ExternalReference context_address(Top::k_context_address);
2195 movq(kScratchRegister, context_address);
2196 movq(rsi, Operand(kScratchRegister, 0));
2197#ifdef DEBUG
2198 movq(Operand(kScratchRegister, 0), Immediate(0));
2199#endif
2200
2201 // Push the return address to get ready to return.
2202 push(rcx);
2203
2204 // Clear the top frame.
2205 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2206 movq(kScratchRegister, c_entry_fp_address);
2207 movq(Operand(kScratchRegister, 0), Immediate(0));
2208}
2209
2210
Steve Block3ce2e202009-11-05 08:53:23 +00002211Register MacroAssembler::CheckMaps(JSObject* object,
2212 Register object_reg,
2213 JSObject* holder,
2214 Register holder_reg,
Steve Blocka7e24c12009-10-30 11:49:00 +00002215 Register scratch,
Steve Block6ded16b2010-05-10 14:33:55 +01002216 int save_at_depth,
Steve Blocka7e24c12009-10-30 11:49:00 +00002217 Label* miss) {
2218 // Make sure there's no overlap between scratch and the other
2219 // registers.
2220 ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));
2221
2222 // Keep track of the current object in register reg. On the first
2223 // iteration, reg is an alias for object_reg, on later iterations,
2224 // it is an alias for holder_reg.
2225 Register reg = object_reg;
Steve Block6ded16b2010-05-10 14:33:55 +01002226 int depth = 0;
2227
2228 if (save_at_depth == depth) {
2229 movq(Operand(rsp, kPointerSize), object_reg);
2230 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002231
2232 // Check the maps in the prototype chain.
2233 // Traverse the prototype chain from the object and do map checks.
2234 while (object != holder) {
2235 depth++;
2236
2237 // Only global objects and objects that do not require access
2238 // checks are allowed in stubs.
2239 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2240
2241 JSObject* prototype = JSObject::cast(object->GetPrototype());
2242 if (Heap::InNewSpace(prototype)) {
2243 // Get the map of the current object.
2244 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2245 Cmp(scratch, Handle<Map>(object->map()));
2246 // Branch on the result of the map check.
2247 j(not_equal, miss);
2248 // Check access rights to the global object. This has to happen
2249 // after the map check so that we know that the object is
2250 // actually a global object.
2251 if (object->IsJSGlobalProxy()) {
2252 CheckAccessGlobalProxy(reg, scratch, miss);
2253
2254 // Restore scratch register to be the map of the object.
2255 // We load the prototype from the map in the scratch register.
2256 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2257 }
2258 // The prototype is in new space; we cannot store a reference
2259 // to it in the code. Load it from the map.
                                              2260 reg = holder_reg; // from now on the object is in holder_reg
2261 movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));
2262
2263 } else {
2264 // Check the map of the current object.
2265 Cmp(FieldOperand(reg, HeapObject::kMapOffset),
2266 Handle<Map>(object->map()));
2267 // Branch on the result of the map check.
2268 j(not_equal, miss);
2269 // Check access rights to the global object. This has to happen
2270 // after the map check so that we know that the object is
2271 // actually a global object.
2272 if (object->IsJSGlobalProxy()) {
2273 CheckAccessGlobalProxy(reg, scratch, miss);
2274 }
2275 // The prototype is in old space; load it directly.
                                              2276 reg = holder_reg; // from now on the object is in holder_reg
2277 Move(reg, Handle<JSObject>(prototype));
2278 }
2279
Steve Block6ded16b2010-05-10 14:33:55 +01002280 if (save_at_depth == depth) {
2281 movq(Operand(rsp, kPointerSize), reg);
2282 }
2283
Steve Blocka7e24c12009-10-30 11:49:00 +00002284 // Go to the next object in the prototype chain.
2285 object = prototype;
2286 }
2287
2288 // Check the holder map.
Steve Block3ce2e202009-11-05 08:53:23 +00002289 Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002290 j(not_equal, miss);
2291
2292 // Log the check depth.
Steve Block6ded16b2010-05-10 14:33:55 +01002293 LOG(IntEvent("check-maps-depth", depth + 1));
Steve Blocka7e24c12009-10-30 11:49:00 +00002294
2295 // Perform security check for access to the global object and return
2296 // the holder register.
2297 ASSERT(object == holder);
2298 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2299 if (object->IsJSGlobalProxy()) {
2300 CheckAccessGlobalProxy(reg, scratch, miss);
2301 }
2302 return reg;
2303}
2304
2305
Steve Blocka7e24c12009-10-30 11:49:00 +00002306void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2307 Register scratch,
2308 Label* miss) {
2309 Label same_contexts;
2310
2311 ASSERT(!holder_reg.is(scratch));
2312 ASSERT(!scratch.is(kScratchRegister));
2313 // Load current lexical context from the stack frame.
2314 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2315
2316 // When generating debug code, make sure the lexical context is set.
2317 if (FLAG_debug_code) {
2318 cmpq(scratch, Immediate(0));
2319 Check(not_equal, "we should not have an empty lexical context");
2320 }
2321 // Load the global context of the current context.
2322 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2323 movq(scratch, FieldOperand(scratch, offset));
2324 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2325
2326 // Check the context is a global context.
2327 if (FLAG_debug_code) {
2328 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2329 Factory::global_context_map());
2330 Check(equal, "JSGlobalObject::global_context should be a global context.");
2331 }
2332
2333 // Check if both contexts are the same.
2334 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2335 j(equal, &same_contexts);
2336
2337 // Compare security tokens.
2338 // Check that the security token in the calling global object is
2339 // compatible with the security token in the receiving global
2340 // object.
2341
2342 // Check the context is a global context.
2343 if (FLAG_debug_code) {
2344 // Preserve original value of holder_reg.
2345 push(holder_reg);
2346 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2347 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2348 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2349
                                              2350 // Read the first word and compare it to global_context_map().
2351 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2352 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2353 Check(equal, "JSGlobalObject::global_context should be a global context.");
2354 pop(holder_reg);
2355 }
2356
2357 movq(kScratchRegister,
2358 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00002359 int token_offset =
2360 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002361 movq(scratch, FieldOperand(scratch, token_offset));
2362 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2363 j(not_equal, miss);
2364
2365 bind(&same_contexts);
2366}
2367
2368
2369void MacroAssembler::LoadAllocationTopHelper(Register result,
2370 Register result_end,
2371 Register scratch,
2372 AllocationFlags flags) {
2373 ExternalReference new_space_allocation_top =
2374 ExternalReference::new_space_allocation_top_address();
2375
2376 // Just return if allocation top is already known.
2377 if ((flags & RESULT_CONTAINS_TOP) != 0) {
2378 // No use of scratch if allocation top is provided.
Steve Block6ded16b2010-05-10 14:33:55 +01002379 ASSERT(!scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00002380#ifdef DEBUG
2381 // Assert that result actually contains top on entry.
2382 movq(kScratchRegister, new_space_allocation_top);
2383 cmpq(result, Operand(kScratchRegister, 0));
2384 Check(equal, "Unexpected allocation top");
2385#endif
2386 return;
2387 }
2388
Steve Block6ded16b2010-05-10 14:33:55 +01002389 // Move address of new object to result. Use scratch register if available,
2390 // and keep address in scratch until call to UpdateAllocationTopHelper.
2391 if (scratch.is_valid()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002392 ASSERT(!scratch.is(result_end));
2393 movq(scratch, new_space_allocation_top);
2394 movq(result, Operand(scratch, 0));
Steve Block6ded16b2010-05-10 14:33:55 +01002395 } else if (result.is(rax)) {
2396 load_rax(new_space_allocation_top);
2397 } else {
2398 movq(kScratchRegister, new_space_allocation_top);
2399 movq(result, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002400 }
2401}
2402
2403
2404void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2405 Register scratch) {
Steve Blockd0582a62009-12-15 09:54:21 +00002406 if (FLAG_debug_code) {
2407 testq(result_end, Immediate(kObjectAlignmentMask));
2408 Check(zero, "Unaligned allocation in new space");
2409 }
2410
Steve Blocka7e24c12009-10-30 11:49:00 +00002411 ExternalReference new_space_allocation_top =
2412 ExternalReference::new_space_allocation_top_address();
2413
2414 // Update new top.
2415 if (result_end.is(rax)) {
2416 // rax can be stored directly to a memory location.
2417 store_rax(new_space_allocation_top);
2418 } else {
2419 // Register required - use scratch provided if available.
Steve Block6ded16b2010-05-10 14:33:55 +01002420 if (scratch.is_valid()) {
2421 movq(Operand(scratch, 0), result_end);
2422 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +00002423 movq(kScratchRegister, new_space_allocation_top);
2424 movq(Operand(kScratchRegister, 0), result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00002425 }
2426 }
2427}
2428
2429
2430void MacroAssembler::AllocateInNewSpace(int object_size,
2431 Register result,
2432 Register result_end,
2433 Register scratch,
2434 Label* gc_required,
2435 AllocationFlags flags) {
2436 ASSERT(!result.is(result_end));
2437
2438 // Load address of new object into result.
2439 LoadAllocationTopHelper(result, result_end, scratch, flags);
2440
2441 // Calculate new top and bail out if new space is exhausted.
2442 ExternalReference new_space_allocation_limit =
2443 ExternalReference::new_space_allocation_limit_address();
Steve Block6ded16b2010-05-10 14:33:55 +01002444
2445 Register top_reg = result_end.is_valid() ? result_end : result;
2446
2447 if (top_reg.is(result)) {
2448 addq(top_reg, Immediate(object_size));
2449 } else {
2450 lea(top_reg, Operand(result, object_size));
2451 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002452 movq(kScratchRegister, new_space_allocation_limit);
Steve Block6ded16b2010-05-10 14:33:55 +01002453 cmpq(top_reg, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002454 j(above, gc_required);
2455
2456 // Update allocation top.
Steve Block6ded16b2010-05-10 14:33:55 +01002457 UpdateAllocationTopHelper(top_reg, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00002458
Steve Block6ded16b2010-05-10 14:33:55 +01002459 if (top_reg.is(result)) {
2460 if ((flags & TAG_OBJECT) != 0) {
2461 subq(result, Immediate(object_size - kHeapObjectTag));
2462 } else {
2463 subq(result, Immediate(object_size));
2464 }
2465 } else if ((flags & TAG_OBJECT) != 0) {
2466 // Tag the result if requested.
Steve Blocka7e24c12009-10-30 11:49:00 +00002467 addq(result, Immediate(kHeapObjectTag));
2468 }
2469}
2470
2471
2472void MacroAssembler::AllocateInNewSpace(int header_size,
2473 ScaleFactor element_size,
2474 Register element_count,
2475 Register result,
2476 Register result_end,
2477 Register scratch,
2478 Label* gc_required,
2479 AllocationFlags flags) {
2480 ASSERT(!result.is(result_end));
2481
2482 // Load address of new object into result.
2483 LoadAllocationTopHelper(result, result_end, scratch, flags);
2484
2485 // Calculate new top and bail out if new space is exhausted.
2486 ExternalReference new_space_allocation_limit =
2487 ExternalReference::new_space_allocation_limit_address();
2488 lea(result_end, Operand(result, element_count, element_size, header_size));
2489 movq(kScratchRegister, new_space_allocation_limit);
2490 cmpq(result_end, Operand(kScratchRegister, 0));
2491 j(above, gc_required);
2492
2493 // Update allocation top.
2494 UpdateAllocationTopHelper(result_end, scratch);
2495
2496 // Tag the result if requested.
2497 if ((flags & TAG_OBJECT) != 0) {
2498 addq(result, Immediate(kHeapObjectTag));
2499 }
2500}
2501
2502
2503void MacroAssembler::AllocateInNewSpace(Register object_size,
2504 Register result,
2505 Register result_end,
2506 Register scratch,
2507 Label* gc_required,
2508 AllocationFlags flags) {
2509 // Load address of new object into result.
2510 LoadAllocationTopHelper(result, result_end, scratch, flags);
2511
2512 // Calculate new top and bail out if new space is exhausted.
2513 ExternalReference new_space_allocation_limit =
2514 ExternalReference::new_space_allocation_limit_address();
2515 if (!object_size.is(result_end)) {
2516 movq(result_end, object_size);
2517 }
2518 addq(result_end, result);
2519 movq(kScratchRegister, new_space_allocation_limit);
2520 cmpq(result_end, Operand(kScratchRegister, 0));
2521 j(above, gc_required);
2522
2523 // Update allocation top.
2524 UpdateAllocationTopHelper(result_end, scratch);
2525
2526 // Tag the result if requested.
2527 if ((flags & TAG_OBJECT) != 0) {
2528 addq(result, Immediate(kHeapObjectTag));
2529 }
2530}
2531
2532
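// Undoes the most recent new-space allocation by resetting the allocation top
// to the (untagged) start of the given object.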
2533void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2534 ExternalReference new_space_allocation_top =
2535 ExternalReference::new_space_allocation_top_address();
2536
2537 // Make sure the object has no tag before resetting top.
2538 and_(object, Immediate(~kHeapObjectTagMask));
2539 movq(kScratchRegister, new_space_allocation_top);
2540#ifdef DEBUG
2541 cmpq(object, Operand(kScratchRegister, 0));
2542 Check(below, "Undo allocation of non allocated memory");
2543#endif
2544 movq(Operand(kScratchRegister, 0), object);
2545}
2546
2547
Steve Block3ce2e202009-11-05 08:53:23 +00002548void MacroAssembler::AllocateHeapNumber(Register result,
2549 Register scratch,
2550 Label* gc_required) {
2551 // Allocate heap number in new space.
2552 AllocateInNewSpace(HeapNumber::kSize,
2553 result,
2554 scratch,
2555 no_reg,
2556 gc_required,
2557 TAG_OBJECT);
2558
2559 // Set the map.
2560 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2561 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2562}
2563
2564
Leon Clarkee46be812010-01-19 14:06:41 +00002565void MacroAssembler::AllocateTwoByteString(Register result,
2566 Register length,
2567 Register scratch1,
2568 Register scratch2,
2569 Register scratch3,
2570 Label* gc_required) {
2571 // Calculate the number of bytes needed for the characters in the string while
2572 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002573 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
2574 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002575 ASSERT(kShortSize == 2);
                                              2576 // scratch1 = length * 2 + kObjectAlignmentMask + kHeaderAlignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002577 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
2578 kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002579 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002580 if (kHeaderAlignment > 0) {
2581 subq(scratch1, Immediate(kHeaderAlignment));
2582 }
Leon Clarkee46be812010-01-19 14:06:41 +00002583
2584 // Allocate two byte string in new space.
2585 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2586 times_1,
2587 scratch1,
2588 result,
2589 scratch2,
2590 scratch3,
2591 gc_required,
2592 TAG_OBJECT);
2593
2594 // Set the map, length and hash field.
2595 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2596 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002597 Integer32ToSmi(scratch1, length);
2598 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Leon Clarkee46be812010-01-19 14:06:41 +00002599 movl(FieldOperand(result, String::kHashFieldOffset),
2600 Immediate(String::kEmptyHashField));
2601}
2602
2603
2604void MacroAssembler::AllocateAsciiString(Register result,
2605 Register length,
2606 Register scratch1,
2607 Register scratch2,
2608 Register scratch3,
2609 Label* gc_required) {
2610 // Calculate the number of bytes needed for the characters in the string while
2611 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002612 const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
2613 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002614 movl(scratch1, length);
2615 ASSERT(kCharSize == 1);
Steve Block6ded16b2010-05-10 14:33:55 +01002616 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002617 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002618 if (kHeaderAlignment > 0) {
2619 subq(scratch1, Immediate(kHeaderAlignment));
2620 }
Leon Clarkee46be812010-01-19 14:06:41 +00002621
2622 // Allocate ascii string in new space.
2623 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2624 times_1,
2625 scratch1,
2626 result,
2627 scratch2,
2628 scratch3,
2629 gc_required,
2630 TAG_OBJECT);
2631
2632 // Set the map, length and hash field.
2633 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2634 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002635 Integer32ToSmi(scratch1, length);
2636 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Leon Clarkee46be812010-01-19 14:06:41 +00002637 movl(FieldOperand(result, String::kHashFieldOffset),
2638 Immediate(String::kEmptyHashField));
2639}
2640
2641
2642void MacroAssembler::AllocateConsString(Register result,
2643 Register scratch1,
2644 Register scratch2,
2645 Label* gc_required) {
                                              2646 // Allocate cons string object in new space.
2647 AllocateInNewSpace(ConsString::kSize,
2648 result,
2649 scratch1,
2650 scratch2,
2651 gc_required,
2652 TAG_OBJECT);
2653
2654 // Set the map. The other fields are left uninitialized.
2655 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2656 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2657}
2658
2659
2660void MacroAssembler::AllocateAsciiConsString(Register result,
2661 Register scratch1,
2662 Register scratch2,
2663 Label* gc_required) {
                                              2664 // Allocate ascii cons string object in new space.
2665 AllocateInNewSpace(ConsString::kSize,
2666 result,
2667 scratch1,
2668 scratch2,
2669 gc_required,
2670 TAG_OBJECT);
2671
2672 // Set the map. The other fields are left uninitialized.
2673 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2674 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2675}
2676
2677
Steve Blockd0582a62009-12-15 09:54:21 +00002678void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2679 if (context_chain_length > 0) {
2680 // Move up the chain of contexts to the context containing the slot.
2681 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2682 // Load the function context (which is the incoming, outer context).
Leon Clarkee46be812010-01-19 14:06:41 +00002683 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00002684 for (int i = 1; i < context_chain_length; i++) {
2685 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2686 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2687 }
2688 // The context may be an intermediate context, not a function context.
2689 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2690 } else { // context is the current function context.
2691 // The context may be an intermediate context, not a function context.
2692 movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2693 }
2694}
2695
Leon Clarke4515c472010-02-03 11:58:03 +00002696int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2697 // On Windows stack slots are reserved by the caller for all arguments
2698 // including the ones passed in registers. On Linux 6 arguments are passed in
2699 // registers and the caller does not reserve stack slots for them.
2700 ASSERT(num_arguments >= 0);
2701#ifdef _WIN64
2702 static const int kArgumentsWithoutStackSlot = 0;
2703#else
2704 static const int kArgumentsWithoutStackSlot = 6;
2705#endif
2706 return num_arguments > kArgumentsWithoutStackSlot ?
2707 num_arguments - kArgumentsWithoutStackSlot : 0;
2708}
2709
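// Aligns rsp for a C call and reserves stack slots for the arguments. The
// original rsp is saved just above the argument area so that CallCFunction
// can restore it after the call.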
2710void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2711 int frame_alignment = OS::ActivationFrameAlignment();
2712 ASSERT(frame_alignment != 0);
2713 ASSERT(num_arguments >= 0);
2714 // Make stack end at alignment and allocate space for arguments and old rsp.
2715 movq(kScratchRegister, rsp);
2716 ASSERT(IsPowerOf2(frame_alignment));
2717 int argument_slots_on_stack =
2718 ArgumentStackSlotsForCFunctionCall(num_arguments);
2719 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2720 and_(rsp, Immediate(-frame_alignment));
2721 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2722}
2723
2724
2725void MacroAssembler::CallCFunction(ExternalReference function,
2726 int num_arguments) {
2727 movq(rax, function);
2728 CallCFunction(rax, num_arguments);
2729}
2730
2731
2732void MacroAssembler::CallCFunction(Register function, int num_arguments) {
Steve Block6ded16b2010-05-10 14:33:55 +01002733 // Check stack alignment.
2734 if (FLAG_debug_code) {
2735 CheckStackAlignment();
2736 }
2737
Leon Clarke4515c472010-02-03 11:58:03 +00002738 call(function);
2739 ASSERT(OS::ActivationFrameAlignment() != 0);
2740 ASSERT(num_arguments >= 0);
2741 int argument_slots_on_stack =
2742 ArgumentStackSlotsForCFunctionCall(num_arguments);
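  // Restore the stack pointer saved by PrepareCallCFunction.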
2743 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2744}
2745
Steve Blockd0582a62009-12-15 09:54:21 +00002746
Steve Blocka7e24c12009-10-30 11:49:00 +00002747CodePatcher::CodePatcher(byte* address, int size)
2748 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2749 // Create a new macro assembler pointing to the address of the code to patch.
                                              2750 // The size is adjusted with kGap in order for the assembler to generate size
2751 // bytes of instructions without failing with buffer size constraints.
2752 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2753}
2754
2755
2756CodePatcher::~CodePatcher() {
2757 // Indicate that code has changed.
2758 CPU::FlushICache(address_, size_);
2759
2760 // Check that the code was patched as expected.
2761 ASSERT(masm_.pc_ == address_ + size_);
2762 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2763}
2764
Steve Blocka7e24c12009-10-30 11:49:00 +00002765} } // namespace v8::internal