// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}

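// Region-marking write barrier helper. Computes the start address of the page
// containing 'addr' and the number of the region within that page that 'addr'
// falls into, then sets the corresponding bit in the page's dirty-regions
// bitmap so the GC knows to rescan that region for pointers into new space.
// Clobbers 'object' and 'addr'.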
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr. See
  // Page::GetRegionNumberForAddress for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));

  // Set dirty mark for region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}

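// Record a write of 'value' into the field of 'object' at 'offset', or, when
// 'offset' is zero, into the array element addressed via the untagged int32
// 'index'. A typical use, sketched here for illustration only (register
// assignments are hypothetical), is marking the slot dirty after storing into
// a fixed array element:
//   movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
//        rax);
//   RecordWrite(rbx, 0, rax, rcx);  // rbx: array, rax: value, rcx: index.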
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

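// Branch to 'branch' based on whether 'object' is in new space, using
// 'scratch' as a temporary. Both paths arrange the flags so that 'equal'
// means "object is in new space" and 'not_equal' means it is not; the write
// barrier code above passes 'equal' to skip dirty marking for new-space
// objects.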
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}

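// Abort terminates execution via Runtime::kAbort. The message pointer cannot
// be pushed as-is, because the GC would try to interpret a raw, possibly
// unaligned pointer on the stack. Instead the pointer is split into a
// smi-tagged base (low tag bits cleared) plus a small smi-encoded delta, and
// the runtime recombines the two arguments to recover the original pointer.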
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}

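// Load the entry point of the JavaScript builtin 'id' into 'target' and, as a
// side effect, leave the builtin's JSFunction in rdi (hence the assert that
// 'target' is not rdi); the invoked code expects the function object in rdi.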
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));

  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  movq(rdi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  movq(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    movq(target, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    movq(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmpq(target, Operand(rsp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  lea(target, FieldOperand(target, Code::kHeaderSize));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

static int kSmiShift = kSmiTagSize + kSmiShiftSize;
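// On x64, kSmiShift works out to 32: a smi stores its signed 32-bit value in
// the upper half of the word and keeps the lower 32 bits (tag and padding)
// zero. That is why Integer32ToSmi below is a plain shift left by kSmiShift
// and why several helpers can operate on the value half with 32-bit
// instructions at an offset of kSmiShift / kBitsPerByte.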

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

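// Materialize the smi 'source' in 'dst' without a 64-bit immediate load where
// possible. kSmiConstantRegister is expected to hold the smi
// kSmiConstantRegisterValue (i.e. 1; the debug check below verifies this), so
// constants whose absolute value is 1, 2, 3, 4, 5, 8 or 9 can be built from
// it with scaled lea addressing, with a movq of the full 64-bit bit pattern
// as the fallback; negative values are negated afterwards.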
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (FLAG_debug_code) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      Label ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  if (source->value() == 0) {
    xorl(dst, dst);
    return;
  }
  int value = source->value();
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (FLAG_debug_code) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}

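// Check that 'src' is both a smi and non-negative. Rotating left by one moves
// the sign bit into bit 0 and the smi tag bit into bit 1, so testing the low
// two bits against 3 leaves the zero flag set exactly when the value is a
// non-negative smi. Clobbers kScratchRegister.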
Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Make mask 0x8000000000000001 and test that both bits are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  andl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}

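// Negate the smi in 'src' into 'dst', jumping to 'on_smi_result' when the
// result is a valid smi. Negation fails for 0 (the result would be the
// JavaScript value -0, which cannot be represented as a smi) and for
// Smi::kMinValue, whose negation overflows; both cases are caught by
// comparing the negated value with the original, since they are the only
// inputs equal to their own negation.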
void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible.
    if (dst.is(src1)) {
      addq(dst, src2);
    } else {
      movq(dst, src1);
      addq(dst, src2);
    }
    Assert(no_overflow, "Smi addition overflow");
  } else if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    cmpq(dst, src2);
    j(overflow, on_not_smi_result);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result) {
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}

void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, the check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}

void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}

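// Bitwise-not a smi. With the value held in the upper 32 bits and zeros in
// the lower 32 bits, first setting the low 32 bits to ones (by xor, or by
// add in the non-aliased case, which is equivalent here because those bits
// start out zero) and then applying not_ flips only the value half and
// clears the low bits again, so the result is a valid smi.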
1230void MacroAssembler::SmiNot(Register dst, Register src) {
Steve Block3ce2e202009-11-05 08:53:23 +00001231 ASSERT(!dst.is(kScratchRegister));
1232 ASSERT(!src.is(kScratchRegister));
1233 // Set tag and padding bits before negating, so that they are zero afterwards.
1234 movl(kScratchRegister, Immediate(~0));
Steve Blocka7e24c12009-10-30 11:49:00 +00001235 if (dst.is(src)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001236 xor_(dst, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00001237 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001238 lea(dst, Operand(src, kScratchRegister, times_1, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00001239 }
Steve Block3ce2e202009-11-05 08:53:23 +00001240 not_(dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001241}
1242
1243
1244void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
Steve Block3ce2e202009-11-05 08:53:23 +00001245 ASSERT(!dst.is(src2));
Steve Blocka7e24c12009-10-30 11:49:00 +00001246 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001247 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001248 }
1249 and_(dst, src2);
1250}
1251
1252
Steve Block3ce2e202009-11-05 08:53:23 +00001253void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
1254 if (constant->value() == 0) {
1255 xor_(dst, dst);
1256 } else if (dst.is(src)) {
1257 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001258 Register constant_reg = GetSmiConstant(constant);
1259 and_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001260 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001261 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001262 and_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001263 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001264}
1265
1266
1267void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1268 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001269 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001270 }
1271 or_(dst, src2);
1272}
1273
1274
Steve Block3ce2e202009-11-05 08:53:23 +00001275void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1276 if (dst.is(src)) {
1277 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001278 Register constant_reg = GetSmiConstant(constant);
1279 or_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001280 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001281 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001282 or_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001283 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001284}
1285
Steve Block3ce2e202009-11-05 08:53:23 +00001286
Steve Blocka7e24c12009-10-30 11:49:00 +00001287void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1288 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001289 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001290 }
1291 xor_(dst, src2);
1292}
1293
1294
Steve Block3ce2e202009-11-05 08:53:23 +00001295void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1296 if (dst.is(src)) {
1297 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001298 Register constant_reg = GetSmiConstant(constant);
1299 xor_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001300 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001301 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001302 xor_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001303 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001304}
1305
1306
Steve Blocka7e24c12009-10-30 11:49:00 +00001307void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
1308 Register src,
1309 int shift_value) {
Steve Block3ce2e202009-11-05 08:53:23 +00001310 ASSERT(is_uint5(shift_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001311 if (shift_value > 0) {
1312 if (dst.is(src)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001313 sar(dst, Immediate(shift_value + kSmiShift));
1314 shl(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001315 } else {
1316 UNIMPLEMENTED(); // Not used.
1317 }
1318 }
1319}
1320
1321
1322void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
1323 Register src,
1324 int shift_value,
1325 Label* on_not_smi_result) {
1326 // Logic right shift interprets its result as an *unsigned* number.
1327 if (dst.is(src)) {
1328 UNIMPLEMENTED(); // Not used.
1329 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001330 movq(dst, src);
1331 if (shift_value == 0) {
1332 testq(dst, dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001333 j(negative, on_not_smi_result);
1334 }
Steve Block3ce2e202009-11-05 08:53:23 +00001335 shr(dst, Immediate(shift_value + kSmiShift));
1336 shl(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001337 }
1338}
1339
1340
1341void MacroAssembler::SmiShiftLeftConstant(Register dst,
1342 Register src,
Kristian Monsen25f61362010-05-21 11:50:48 +01001343 int shift_value) {
Steve Block3ce2e202009-11-05 08:53:23 +00001344 if (!dst.is(src)) {
1345 movq(dst, src);
1346 }
1347 if (shift_value > 0) {
1348 shl(dst, Immediate(shift_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001349 }
1350}
1351
1352
1353void MacroAssembler::SmiShiftLeft(Register dst,
1354 Register src1,
Kristian Monsen25f61362010-05-21 11:50:48 +01001355 Register src2) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001356 ASSERT(!dst.is(rcx));
1357 Label result_ok;
Steve Block3ce2e202009-11-05 08:53:23 +00001358 // Untag shift amount.
1359 if (!dst.is(src1)) {
1360 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001361 }
Steve Block3ce2e202009-11-05 08:53:23 +00001362 SmiToInteger32(rcx, src2);
1363 // Shift amount specified by lower 5 bits, not six as the shl opcode.
1364 and_(rcx, Immediate(0x1f));
Steve Blockd0582a62009-12-15 09:54:21 +00001365 shl_cl(dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001366}
1367
1368
1369void MacroAssembler::SmiShiftLogicalRight(Register dst,
1370 Register src1,
1371 Register src2,
1372 Label* on_not_smi_result) {
Steve Block3ce2e202009-11-05 08:53:23 +00001373 ASSERT(!dst.is(kScratchRegister));
1374 ASSERT(!src1.is(kScratchRegister));
1375 ASSERT(!src2.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001376 ASSERT(!dst.is(rcx));
1377 Label result_ok;
Steve Block3ce2e202009-11-05 08:53:23 +00001378 if (src1.is(rcx) || src2.is(rcx)) {
1379 movq(kScratchRegister, rcx);
Steve Blocka7e24c12009-10-30 11:49:00 +00001380 }
Steve Block3ce2e202009-11-05 08:53:23 +00001381 if (!dst.is(src1)) {
1382 movq(dst, src1);
1383 }
1384 SmiToInteger32(rcx, src2);
1385 orl(rcx, Immediate(kSmiShift));
Steve Blockd0582a62009-12-15 09:54:21 +00001386 shr_cl(dst); // Shift is rcx modulo 0x1f + 32.
Steve Block3ce2e202009-11-05 08:53:23 +00001387 shl(dst, Immediate(kSmiShift));
1388 testq(dst, dst);
1389 if (src1.is(rcx) || src2.is(rcx)) {
1390 Label positive_result;
1391 j(positive, &positive_result);
1392 if (src1.is(rcx)) {
1393 movq(src1, kScratchRegister);
1394 } else {
1395 movq(src2, kScratchRegister);
1396 }
1397 jmp(on_not_smi_result);
1398 bind(&positive_result);
1399 } else {
1400 j(negative, on_not_smi_result); // src2 was zero and src1 negative.
1401 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001402}
1403
1404
1405void MacroAssembler::SmiShiftArithmeticRight(Register dst,
1406 Register src1,
1407 Register src2) {
Steve Block3ce2e202009-11-05 08:53:23 +00001408 ASSERT(!dst.is(kScratchRegister));
1409 ASSERT(!src1.is(kScratchRegister));
1410 ASSERT(!src2.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001411 ASSERT(!dst.is(rcx));
Steve Block3ce2e202009-11-05 08:53:23 +00001412 if (src1.is(rcx)) {
1413 movq(kScratchRegister, src1);
1414 } else if (src2.is(rcx)) {
1415 movq(kScratchRegister, src2);
1416 }
1417 if (!dst.is(src1)) {
1418 movq(dst, src1);
1419 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001420 SmiToInteger32(rcx, src2);
Steve Block3ce2e202009-11-05 08:53:23 +00001421 orl(rcx, Immediate(kSmiShift));
Steve Blockd0582a62009-12-15 09:54:21 +00001422  sar_cl(dst);  // Shift amount is (original rcx & 0x1f) + 32.
Steve Block3ce2e202009-11-05 08:53:23 +00001423 shl(dst, Immediate(kSmiShift));
1424 if (src1.is(rcx)) {
1425 movq(src1, kScratchRegister);
1426 } else if (src2.is(rcx)) {
1427 movq(src2, kScratchRegister);
1428 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001429}
1430
1431
1432void MacroAssembler::SelectNonSmi(Register dst,
1433 Register src1,
1434 Register src2,
1435 Label* on_not_smis) {
Steve Block3ce2e202009-11-05 08:53:23 +00001436 ASSERT(!dst.is(kScratchRegister));
1437 ASSERT(!src1.is(kScratchRegister));
1438 ASSERT(!src2.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001439 ASSERT(!dst.is(src1));
1440 ASSERT(!dst.is(src2));
1441 // Both operands must not be smis.
1442#ifdef DEBUG
Steve Block3ce2e202009-11-05 08:53:23 +00001443 if (allow_stub_calls()) { // Check contains a stub call.
1444 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
1445 Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
1446 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001447#endif
1448 ASSERT_EQ(0, kSmiTag);
1449 ASSERT_EQ(0, Smi::FromInt(0));
Steve Block3ce2e202009-11-05 08:53:23 +00001450 movl(kScratchRegister, Immediate(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001451 and_(kScratchRegister, src1);
1452 testl(kScratchRegister, src2);
Steve Block3ce2e202009-11-05 08:53:23 +00001453  // If non-zero, then neither operand is a smi (smis have a zero tag bit).
Steve Blocka7e24c12009-10-30 11:49:00 +00001454 j(not_zero, on_not_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +00001455
Steve Block3ce2e202009-11-05 08:53:23 +00001456 // Exactly one operand is a smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00001457 ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
1458  // kScratchRegister still holds src1 & kSmiTagMask, which is either zero or one.
1459 subq(kScratchRegister, Immediate(1));
1460 // If src1 is a smi, then scratch register all 1s, else it is all 0s.
1461 movq(dst, src1);
1462 xor_(dst, src2);
1463 and_(dst, kScratchRegister);
1464 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
1465 xor_(dst, src1);
Steve Block3ce2e202009-11-05 08:53:23 +00001466 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
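  // Worked example: if src1 is the smi, kScratchRegister became 0 - 1 = ~0
  // above, so dst = (src1 ^ src2) & ~0 = src1 ^ src2 and the final xor with
  // src1 leaves src2 in dst. If src2 is the smi instead, kScratchRegister is
  // 1 - 1 = 0, dst is masked to zero, and the final xor leaves src1 in dst.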
Steve Blocka7e24c12009-10-30 11:49:00 +00001467}
1468
Steve Block8defd9f2010-07-08 12:39:36 +01001469
Steve Block3ce2e202009-11-05 08:53:23 +00001470SmiIndex MacroAssembler::SmiToIndex(Register dst,
1471 Register src,
1472 int shift) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001473 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001474 // There is a possible optimization if shift is in the range 60-63, but that
1475 // will (and must) never happen.
1476 if (!dst.is(src)) {
1477 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001478 }
Steve Block3ce2e202009-11-05 08:53:23 +00001479 if (shift < kSmiShift) {
1480 sar(dst, Immediate(kSmiShift - shift));
1481 } else {
1482 shl(dst, Immediate(shift - kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001483 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001484 return SmiIndex(dst, times_1);
1485}
1486
Steve Blocka7e24c12009-10-30 11:49:00 +00001487SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1488 Register src,
1489 int shift) {
1490 // Register src holds a positive smi.
1491 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001492 if (!dst.is(src)) {
1493 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001494 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001495 neg(dst);
Steve Block3ce2e202009-11-05 08:53:23 +00001496 if (shift < kSmiShift) {
1497 sar(dst, Immediate(kSmiShift - shift));
1498 } else {
1499 shl(dst, Immediate(shift - kSmiShift));
1500 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001501 return SmiIndex(dst, times_1);
1502}
1503
1504
Steve Block3ce2e202009-11-05 08:53:23 +00001505void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
1506 ASSERT_EQ(0, kSmiTag);
1507 Condition smi = CheckSmi(src);
1508 j(smi, on_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00001509}
1510
Steve Block3ce2e202009-11-05 08:53:23 +00001511
1512void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
1513 Condition smi = CheckSmi(src);
1514 j(NegateCondition(smi), on_not_smi);
1515}
1516
1517
1518void MacroAssembler::JumpIfNotPositiveSmi(Register src,
1519 Label* on_not_positive_smi) {
1520 Condition positive_smi = CheckPositiveSmi(src);
1521 j(NegateCondition(positive_smi), on_not_positive_smi);
1522}
1523
1524
1525void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1526 Smi* constant,
1527 Label* on_equals) {
1528 SmiCompare(src, constant);
1529 j(equal, on_equals);
1530}
1531
1532
1533void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
1534 Condition is_valid = CheckInteger32ValidSmiValue(src);
1535 j(NegateCondition(is_valid), on_invalid);
1536}
1537
1538
1539void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1540 Label* on_invalid) {
1541 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1542 j(NegateCondition(is_valid), on_invalid);
1543}
1544
1545
1546void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
1547 Label* on_not_both_smi) {
1548 Condition both_smi = CheckBothSmi(src1, src2);
1549 j(NegateCondition(both_smi), on_not_both_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00001550}
1551
1552
Leon Clarked91b9f72010-01-27 17:25:45 +00001553void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
1554 Label* on_not_both_smi) {
1555 Condition both_smi = CheckBothPositiveSmi(src1, src2);
1556 j(NegateCondition(both_smi), on_not_both_smi);
1557}
1558
1559
1560
Leon Clarkee46be812010-01-19 14:06:41 +00001561void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
1562 Register second_object,
1563 Register scratch1,
1564 Register scratch2,
1565 Label* on_fail) {
1566 // Check that both objects are not smis.
1567 Condition either_smi = CheckEitherSmi(first_object, second_object);
1568 j(either_smi, on_fail);
1569
1570 // Load instance type for both strings.
1571 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
1572 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
1573 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
1574 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
1575
1576 // Check that both are flat ascii strings.
1577 ASSERT(kNotStringTag != 0);
1578 const int kFlatAsciiStringMask =
1579 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
Leon Clarked91b9f72010-01-27 17:25:45 +00001580 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
Leon Clarkee46be812010-01-19 14:06:41 +00001581
1582 andl(scratch1, Immediate(kFlatAsciiStringMask));
1583 andl(scratch2, Immediate(kFlatAsciiStringMask));
1584 // Interleave the bits to check both scratch1 and scratch2 in one test.
1585 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
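  // Because the mask and mask << 3 do not overlap (checked above), the lea
  // below computes scratch1 | (scratch2 << 3), packing both masked instance
  // types into one register so a single cmpl can test them together.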
1586 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1587 cmpl(scratch1,
Leon Clarked91b9f72010-01-27 17:25:45 +00001588 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
Leon Clarkee46be812010-01-19 14:06:41 +00001589 j(not_equal, on_fail);
1590}
1591
1592
Steve Block6ded16b2010-05-10 14:33:55 +01001593void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1594 Register instance_type,
1595 Register scratch,
1596 Label *failure) {
1597 if (!scratch.is(instance_type)) {
1598 movl(scratch, instance_type);
1599 }
1600
1601 const int kFlatAsciiStringMask =
1602 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1603
1604 andl(scratch, Immediate(kFlatAsciiStringMask));
1605 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1606 j(not_equal, failure);
1607}
1608
1609
1610void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1611 Register first_object_instance_type,
1612 Register second_object_instance_type,
1613 Register scratch1,
1614 Register scratch2,
1615 Label* on_fail) {
1616 // Load instance type for both strings.
1617 movq(scratch1, first_object_instance_type);
1618 movq(scratch2, second_object_instance_type);
1619
1620 // Check that both are flat ascii strings.
1621 ASSERT(kNotStringTag != 0);
1622 const int kFlatAsciiStringMask =
1623 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1624 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1625
1626 andl(scratch1, Immediate(kFlatAsciiStringMask));
1627 andl(scratch2, Immediate(kFlatAsciiStringMask));
1628 // Interleave the bits to check both scratch1 and scratch2 in one test.
1629 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1630 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1631 cmpl(scratch1,
1632 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1633 j(not_equal, on_fail);
1634}
1635
1636
Steve Blocka7e24c12009-10-30 11:49:00 +00001637void MacroAssembler::Move(Register dst, Handle<Object> source) {
1638 ASSERT(!source->IsFailure());
1639 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001640 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001641 } else {
1642 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1643 }
1644}
1645
1646
1647void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001648 ASSERT(!source->IsFailure());
Steve Blocka7e24c12009-10-30 11:49:00 +00001649 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001650 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001651 } else {
1652 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1653 movq(dst, kScratchRegister);
1654 }
1655}
1656
1657
1658void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001659 if (source->IsSmi()) {
1660 SmiCompare(dst, Smi::cast(*source));
1661 } else {
1662 Move(kScratchRegister, source);
1663 cmpq(dst, kScratchRegister);
1664 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001665}
1666
1667
1668void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1669 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001670 SmiCompare(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001671 } else {
1672 ASSERT(source->IsHeapObject());
1673 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1674 cmpq(dst, kScratchRegister);
1675 }
1676}
1677
1678
1679void MacroAssembler::Push(Handle<Object> source) {
1680 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001681 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001682 } else {
1683 ASSERT(source->IsHeapObject());
1684 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1685 push(kScratchRegister);
1686 }
1687}
1688
1689
1690void MacroAssembler::Push(Smi* source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001691 intptr_t smi = reinterpret_cast<intptr_t>(source);
1692 if (is_int32(smi)) {
1693 push(Immediate(static_cast<int32_t>(smi)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001694 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001695 Register constant = GetSmiConstant(source);
1696 push(constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001697 }
1698}
1699
1700
Leon Clarkee46be812010-01-19 14:06:41 +00001701void MacroAssembler::Drop(int stack_elements) {
1702 if (stack_elements > 0) {
1703 addq(rsp, Immediate(stack_elements * kPointerSize));
1704 }
1705}
1706
1707
Steve Block3ce2e202009-11-05 08:53:23 +00001708void MacroAssembler::Test(const Operand& src, Smi* source) {
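  // With the smi encoding used in this file (payload in the upper 32 bits,
  // kSmiShift == 32), the 32-bit word at offset kIntSize holds exactly the
  // untagged value, so a 32-bit test against source->value() is equivalent
  // to testing the full tagged word.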
Leon Clarkef7060e22010-06-03 12:02:55 +01001709 testl(Operand(src, kIntSize), Immediate(source->value()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001710}
1711
1712
1713void MacroAssembler::Jump(ExternalReference ext) {
1714 movq(kScratchRegister, ext);
1715 jmp(kScratchRegister);
1716}
1717
1718
1719void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1720 movq(kScratchRegister, destination, rmode);
1721 jmp(kScratchRegister);
1722}
1723
1724
1725void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
Steve Block3ce2e202009-11-05 08:53:23 +00001726 // TODO(X64): Inline this
1727 jmp(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001728}
1729
1730
1731void MacroAssembler::Call(ExternalReference ext) {
1732 movq(kScratchRegister, ext);
1733 call(kScratchRegister);
1734}
1735
1736
1737void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1738 movq(kScratchRegister, destination, rmode);
1739 call(kScratchRegister);
1740}
1741
1742
1743void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1744 ASSERT(RelocInfo::IsCodeTarget(rmode));
1745 WriteRecordedPositions();
Steve Block3ce2e202009-11-05 08:53:23 +00001746 call(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001747}
1748
1749
1750void MacroAssembler::PushTryHandler(CodeLocation try_location,
1751 HandlerType type) {
1752 // Adjust this code if not the case.
1753 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1754
1755 // The pc (return address) is already on TOS. This code pushes state,
1756 // frame pointer and current handler. Check that they are expected
1757 // next on the stack, in that order.
1758 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1759 StackHandlerConstants::kPCOffset - kPointerSize);
1760 ASSERT_EQ(StackHandlerConstants::kFPOffset,
1761 StackHandlerConstants::kStateOffset - kPointerSize);
1762 ASSERT_EQ(StackHandlerConstants::kNextOffset,
1763 StackHandlerConstants::kFPOffset - kPointerSize);
1764
1765 if (try_location == IN_JAVASCRIPT) {
1766 if (type == TRY_CATCH_HANDLER) {
1767 push(Immediate(StackHandler::TRY_CATCH));
1768 } else {
1769 push(Immediate(StackHandler::TRY_FINALLY));
1770 }
1771 push(rbp);
1772 } else {
1773 ASSERT(try_location == IN_JS_ENTRY);
1774 // The frame pointer does not point to a JS frame so we save NULL
1775 // for rbp. We expect the code throwing an exception to check rbp
1776 // before dereferencing it to restore the context.
1777 push(Immediate(StackHandler::ENTRY));
1778 push(Immediate(0)); // NULL frame pointer.
1779 }
1780 // Save the current handler.
1781 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1782 push(Operand(kScratchRegister, 0));
1783 // Link this handler.
1784 movq(Operand(kScratchRegister, 0), rsp);
1785}
1786
1787
Leon Clarkee46be812010-01-19 14:06:41 +00001788void MacroAssembler::PopTryHandler() {
1789 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1790 // Unlink this handler.
1791 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1792 pop(Operand(kScratchRegister, 0));
1793 // Remove the remaining fields.
1794 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1795}
1796
1797
Steve Blocka7e24c12009-10-30 11:49:00 +00001798void MacroAssembler::Ret() {
1799 ret(0);
1800}
1801
1802
1803void MacroAssembler::FCmp() {
Steve Block3ce2e202009-11-05 08:53:23 +00001804 fucomip();
Steve Block8defd9f2010-07-08 12:39:36 +01001805 fstp(0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001806}
1807
1808
1809void MacroAssembler::CmpObjectType(Register heap_object,
1810 InstanceType type,
1811 Register map) {
1812 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1813 CmpInstanceType(map, type);
1814}
1815
1816
1817void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1818 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1819 Immediate(static_cast<int8_t>(type)));
1820}
1821
1822
Andrei Popescu31002712010-02-23 13:46:05 +00001823void MacroAssembler::CheckMap(Register obj,
1824 Handle<Map> map,
1825 Label* fail,
1826 bool is_heap_object) {
1827 if (!is_heap_object) {
1828 JumpIfSmi(obj, fail);
1829 }
1830 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1831 j(not_equal, fail);
1832}
1833
1834
Leon Clarkef7060e22010-06-03 12:02:55 +01001835void MacroAssembler::AbortIfNotNumber(Register object) {
Andrei Popescu402d9372010-02-26 13:31:12 +00001836 Label ok;
1837 Condition is_smi = CheckSmi(object);
1838 j(is_smi, &ok);
1839 Cmp(FieldOperand(object, HeapObject::kMapOffset),
1840 Factory::heap_number_map());
Leon Clarkef7060e22010-06-03 12:02:55 +01001841 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +00001842 bind(&ok);
1843}
1844
1845
Leon Clarkef7060e22010-06-03 12:02:55 +01001846void MacroAssembler::AbortIfNotSmi(Register object) {
1848 Condition is_smi = CheckSmi(object);
Leon Clarkef7060e22010-06-03 12:02:55 +01001849 Assert(is_smi, "Operand not a smi");
Steve Block6ded16b2010-05-10 14:33:55 +01001850}
1851
1852
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001853void MacroAssembler::AbortIfNotRootValue(Register src,
1854 Heap::RootListIndex root_value_index,
1855 const char* message) {
1856 ASSERT(!src.is(kScratchRegister));
1857 LoadRoot(kScratchRegister, root_value_index);
1858 cmpq(src, kScratchRegister);
1859 Check(equal, message);
1860}
1861
1862
1863
Leon Clarked91b9f72010-01-27 17:25:45 +00001864Condition MacroAssembler::IsObjectStringType(Register heap_object,
1865 Register map,
1866 Register instance_type) {
1867 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00001868 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00001869 ASSERT(kNotStringTag != 0);
1870 testb(instance_type, Immediate(kIsNotStringMask));
1871 return zero;
1872}
1873
1874
Steve Blocka7e24c12009-10-30 11:49:00 +00001875void MacroAssembler::TryGetFunctionPrototype(Register function,
1876 Register result,
1877 Label* miss) {
1878 // Check that the receiver isn't a smi.
1879 testl(function, Immediate(kSmiTagMask));
1880 j(zero, miss);
1881
1882 // Check that the function really is a function.
1883 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1884 j(not_equal, miss);
1885
1886 // Make sure that the function has an instance prototype.
1887 Label non_instance;
1888 testb(FieldOperand(result, Map::kBitFieldOffset),
1889 Immediate(1 << Map::kHasNonInstancePrototype));
1890 j(not_zero, &non_instance);
1891
1892 // Get the prototype or initial map from the function.
1893 movq(result,
1894 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1895
1896 // If the prototype or initial map is the hole, don't return it and
1897 // simply miss the cache instead. This will allow us to allocate a
1898 // prototype object on-demand in the runtime system.
1899 CompareRoot(result, Heap::kTheHoleValueRootIndex);
1900 j(equal, miss);
1901
1902 // If the function does not have an initial map, we're done.
1903 Label done;
1904 CmpObjectType(result, MAP_TYPE, kScratchRegister);
1905 j(not_equal, &done);
1906
1907 // Get the prototype from the initial map.
1908 movq(result, FieldOperand(result, Map::kPrototypeOffset));
1909 jmp(&done);
1910
1911 // Non-instance prototype: Fetch prototype from constructor field
1912 // in initial map.
1913 bind(&non_instance);
1914 movq(result, FieldOperand(result, Map::kConstructorOffset));
1915
1916 // All done.
1917 bind(&done);
1918}
1919
1920
1921void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1922 if (FLAG_native_code_counters && counter->Enabled()) {
1923 movq(kScratchRegister, ExternalReference(counter));
1924 movl(Operand(kScratchRegister, 0), Immediate(value));
1925 }
1926}
1927
1928
1929void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1930 ASSERT(value > 0);
1931 if (FLAG_native_code_counters && counter->Enabled()) {
1932 movq(kScratchRegister, ExternalReference(counter));
1933 Operand operand(kScratchRegister, 0);
1934 if (value == 1) {
1935 incl(operand);
1936 } else {
1937 addl(operand, Immediate(value));
1938 }
1939 }
1940}
1941
1942
1943void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1944 ASSERT(value > 0);
1945 if (FLAG_native_code_counters && counter->Enabled()) {
1946 movq(kScratchRegister, ExternalReference(counter));
1947 Operand operand(kScratchRegister, 0);
1948 if (value == 1) {
1949 decl(operand);
1950 } else {
1951 subl(operand, Immediate(value));
1952 }
1953 }
1954}
1955
Steve Blocka7e24c12009-10-30 11:49:00 +00001956#ifdef ENABLE_DEBUGGER_SUPPORT
1957
1958void MacroAssembler::PushRegistersFromMemory(RegList regs) {
1959 ASSERT((regs & ~kJSCallerSaved) == 0);
1960 // Push the content of the memory location to the stack.
1961 for (int i = 0; i < kNumJSCallerSaved; i++) {
1962 int r = JSCallerSavedCode(i);
1963 if ((regs & (1 << r)) != 0) {
1964 ExternalReference reg_addr =
1965 ExternalReference(Debug_Address::Register(i));
1966 movq(kScratchRegister, reg_addr);
1967 push(Operand(kScratchRegister, 0));
1968 }
1969 }
1970}
1971
Steve Block3ce2e202009-11-05 08:53:23 +00001972
Steve Blocka7e24c12009-10-30 11:49:00 +00001973void MacroAssembler::SaveRegistersToMemory(RegList regs) {
1974 ASSERT((regs & ~kJSCallerSaved) == 0);
1975 // Copy the content of registers to memory location.
1976 for (int i = 0; i < kNumJSCallerSaved; i++) {
1977 int r = JSCallerSavedCode(i);
1978 if ((regs & (1 << r)) != 0) {
1979 Register reg = { r };
1980 ExternalReference reg_addr =
1981 ExternalReference(Debug_Address::Register(i));
1982 movq(kScratchRegister, reg_addr);
1983 movq(Operand(kScratchRegister, 0), reg);
1984 }
1985 }
1986}
1987
1988
1989void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
1990 ASSERT((regs & ~kJSCallerSaved) == 0);
1991 // Copy the content of memory location to registers.
1992 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1993 int r = JSCallerSavedCode(i);
1994 if ((regs & (1 << r)) != 0) {
1995 Register reg = { r };
1996 ExternalReference reg_addr =
1997 ExternalReference(Debug_Address::Register(i));
1998 movq(kScratchRegister, reg_addr);
1999 movq(reg, Operand(kScratchRegister, 0));
2000 }
2001 }
2002}
2003
2004
2005void MacroAssembler::PopRegistersToMemory(RegList regs) {
2006 ASSERT((regs & ~kJSCallerSaved) == 0);
2007 // Pop the content from the stack to the memory location.
2008 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
2009 int r = JSCallerSavedCode(i);
2010 if ((regs & (1 << r)) != 0) {
2011 ExternalReference reg_addr =
2012 ExternalReference(Debug_Address::Register(i));
2013 movq(kScratchRegister, reg_addr);
2014 pop(Operand(kScratchRegister, 0));
2015 }
2016 }
2017}
2018
2019
2020void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
2021 Register scratch,
2022 RegList regs) {
2023 ASSERT(!scratch.is(kScratchRegister));
2024 ASSERT(!base.is(kScratchRegister));
2025 ASSERT(!base.is(scratch));
2026 ASSERT((regs & ~kJSCallerSaved) == 0);
2027 // Copy the content of the stack to the memory location and adjust base.
2028 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
2029 int r = JSCallerSavedCode(i);
2030 if ((regs & (1 << r)) != 0) {
2031 movq(scratch, Operand(base, 0));
2032 ExternalReference reg_addr =
2033 ExternalReference(Debug_Address::Register(i));
2034 movq(kScratchRegister, reg_addr);
2035 movq(Operand(kScratchRegister, 0), scratch);
2036 lea(base, Operand(base, kPointerSize));
2037 }
2038 }
2039}
2040
Andrei Popescu402d9372010-02-26 13:31:12 +00002041void MacroAssembler::DebugBreak() {
2042 ASSERT(allow_stub_calls());
2043 xor_(rax, rax); // no arguments
2044 movq(rbx, ExternalReference(Runtime::kDebugBreak));
2045 CEntryStub ces(1);
2046 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Steve Blocka7e24c12009-10-30 11:49:00 +00002047}
Andrei Popescu402d9372010-02-26 13:31:12 +00002048#endif // ENABLE_DEBUGGER_SUPPORT
Steve Blocka7e24c12009-10-30 11:49:00 +00002049
2050
2051void MacroAssembler::InvokePrologue(const ParameterCount& expected,
2052 const ParameterCount& actual,
2053 Handle<Code> code_constant,
2054 Register code_register,
2055 Label* done,
2056 InvokeFlag flag) {
2057 bool definitely_matches = false;
2058 Label invoke;
2059 if (expected.is_immediate()) {
2060 ASSERT(actual.is_immediate());
2061 if (expected.immediate() == actual.immediate()) {
2062 definitely_matches = true;
2063 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01002064 Set(rax, actual.immediate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002065 if (expected.immediate() ==
Steve Block3ce2e202009-11-05 08:53:23 +00002066 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002067 // Don't worry about adapting arguments for built-ins that
2068 // don't want that done. Skip adaption code by making it look
2069 // like we have a match between expected and actual number of
2070 // arguments.
2071 definitely_matches = true;
2072 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01002073 Set(rbx, expected.immediate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002074 }
2075 }
2076 } else {
2077 if (actual.is_immediate()) {
2078 // Expected is in register, actual is immediate. This is the
2079 // case when we invoke function values without going through the
2080 // IC mechanism.
2081 cmpq(expected.reg(), Immediate(actual.immediate()));
2082 j(equal, &invoke);
2083 ASSERT(expected.reg().is(rbx));
Steve Block8defd9f2010-07-08 12:39:36 +01002084 Set(rax, actual.immediate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002085 } else if (!expected.reg().is(actual.reg())) {
2086 // Both expected and actual are in (different) registers. This
2087 // is the case when we invoke functions using call and apply.
2088 cmpq(expected.reg(), actual.reg());
2089 j(equal, &invoke);
2090 ASSERT(actual.reg().is(rax));
2091 ASSERT(expected.reg().is(rbx));
2092 }
2093 }
2094
2095 if (!definitely_matches) {
2096 Handle<Code> adaptor =
2097 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
2098 if (!code_constant.is_null()) {
2099 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
2100 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2101 } else if (!code_register.is(rdx)) {
2102 movq(rdx, code_register);
2103 }
2104
2105 if (flag == CALL_FUNCTION) {
2106 Call(adaptor, RelocInfo::CODE_TARGET);
2107 jmp(done);
2108 } else {
2109 Jump(adaptor, RelocInfo::CODE_TARGET);
2110 }
2111 bind(&invoke);
2112 }
2113}
2114
2115
2116void MacroAssembler::InvokeCode(Register code,
2117 const ParameterCount& expected,
2118 const ParameterCount& actual,
2119 InvokeFlag flag) {
2120 Label done;
2121 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
2122 if (flag == CALL_FUNCTION) {
2123 call(code);
2124 } else {
2125 ASSERT(flag == JUMP_FUNCTION);
2126 jmp(code);
2127 }
2128 bind(&done);
2129}
2130
2131
2132void MacroAssembler::InvokeCode(Handle<Code> code,
2133 const ParameterCount& expected,
2134 const ParameterCount& actual,
2135 RelocInfo::Mode rmode,
2136 InvokeFlag flag) {
2137 Label done;
2138 Register dummy = rax;
2139 InvokePrologue(expected, actual, code, dummy, &done, flag);
2140 if (flag == CALL_FUNCTION) {
2141 Call(code, rmode);
2142 } else {
2143 ASSERT(flag == JUMP_FUNCTION);
2144 Jump(code, rmode);
2145 }
2146 bind(&done);
2147}
2148
2149
2150void MacroAssembler::InvokeFunction(Register function,
2151 const ParameterCount& actual,
2152 InvokeFlag flag) {
2153 ASSERT(function.is(rdi));
2154 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2155 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
2156 movsxlq(rbx,
2157 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
2158 movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
2159 // Advances rdx to the end of the Code object header, to the start of
2160 // the executable code.
2161 lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
2162
2163 ParameterCount expected(rbx);
2164 InvokeCode(rdx, expected, actual, flag);
2165}
2166
2167
Andrei Popescu402d9372010-02-26 13:31:12 +00002168void MacroAssembler::InvokeFunction(JSFunction* function,
2169 const ParameterCount& actual,
2170 InvokeFlag flag) {
2171 ASSERT(function->is_compiled());
2172 // Get the function and setup the context.
2173 Move(rdi, Handle<JSFunction>(function));
2174 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2175
2176 // Invoke the cached code.
2177 Handle<Code> code(function->code());
2178 ParameterCount expected(function->shared()->formal_parameter_count());
2179 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
2180}
2181
2182
Steve Blocka7e24c12009-10-30 11:49:00 +00002183void MacroAssembler::EnterFrame(StackFrame::Type type) {
2184 push(rbp);
2185 movq(rbp, rsp);
2186 push(rsi); // Context.
Steve Block3ce2e202009-11-05 08:53:23 +00002187 Push(Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002188 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2189 push(kScratchRegister);
2190 if (FLAG_debug_code) {
2191 movq(kScratchRegister,
2192 Factory::undefined_value(),
2193 RelocInfo::EMBEDDED_OBJECT);
2194 cmpq(Operand(rsp, 0), kScratchRegister);
2195 Check(not_equal, "code object not properly patched");
2196 }
2197}
2198
2199
2200void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2201 if (FLAG_debug_code) {
Steve Block3ce2e202009-11-05 08:53:23 +00002202 Move(kScratchRegister, Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002203 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2204 Check(equal, "stack frame types must match");
2205 }
2206 movq(rsp, rbp);
2207 pop(rbp);
2208}
2209
2210
Steve Blockd0582a62009-12-15 09:54:21 +00002211void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002212  // Set up the frame structure on the stack.
2213 // All constants are relative to the frame pointer of the exit frame.
2214 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2215 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2216 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2217 push(rbp);
2218 movq(rbp, rsp);
2219
2220 // Reserve room for entry stack pointer and push the debug marker.
Steve Block3ce2e202009-11-05 08:53:23 +00002221 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +00002222 push(Immediate(0)); // Saved entry sp, patched before call.
2223 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2224  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +00002225
2226 // Save the frame pointer and the context in top.
2227 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2228 ExternalReference context_address(Top::k_context_address);
2229 movq(r14, rax); // Backup rax before we use it.
2230
2231 movq(rax, rbp);
2232 store_rax(c_entry_fp_address);
2233 movq(rax, rsi);
2234 store_rax(context_address);
2235
Steve Block8defd9f2010-07-08 12:39:36 +01002236  // Set up argv in callee-saved register r12. It is reused in LeaveExitFrame,
Steve Blocka7e24c12009-10-30 11:49:00 +00002237 // so it must be retained across the C-call.
2238 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
Steve Block8defd9f2010-07-08 12:39:36 +01002239 lea(r12, Operand(rbp, r14, times_pointer_size, offset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002240
2241#ifdef ENABLE_DEBUGGER_SUPPORT
2242 // Save the state of all registers to the stack from the memory
2243 // location. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00002244 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002245 // TODO(1243899): This should be symmetric to
2246    // CopyRegistersFromStackToMemory() but it isn't! rsp is assumed
2247 // correct here, but computed for the other call. Very error
2248 // prone! FIX THIS. Actually there are deeper problems with
2249 // register saving than this asymmetry (see the bug report
2250 // associated with this issue).
2251 PushRegistersFromMemory(kJSCallerSaved);
2252 }
2253#endif
2254
2255#ifdef _WIN64
2256 // Reserve space on stack for result and argument structures, if necessary.
2257 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
2258 // Reserve space for the Arguments object. The Windows 64-bit ABI
2259 // requires us to pass this structure as a pointer to its location on
2260 // the stack. The structure contains 2 values.
2261 int argument_stack_space = 2 * kPointerSize;
2262 // We also need backing space for 4 parameters, even though
2263  // we only pass one or two parameters, and they are passed in registers.
2264 int argument_mirror_space = 4 * kPointerSize;
2265 int total_stack_space =
2266 argument_mirror_space + argument_stack_space + result_stack_space;
2267 subq(rsp, Immediate(total_stack_space));
2268#endif
2269
2270 // Get the required frame alignment for the OS.
2271 static const int kFrameAlignment = OS::ActivationFrameAlignment();
2272 if (kFrameAlignment > 0) {
2273 ASSERT(IsPowerOf2(kFrameAlignment));
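    // kFrameAlignment is a power of two, so -kFrameAlignment has all bits set
    // above the alignment bits; and-ing rsp with it rounds the stack pointer
    // down to an aligned address.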
2274 movq(kScratchRegister, Immediate(-kFrameAlignment));
2275 and_(rsp, kScratchRegister);
2276 }
2277
2278 // Patch the saved entry sp.
2279 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2280}
2281
2282
Steve Blockd0582a62009-12-15 09:54:21 +00002283void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002284 // Registers:
Steve Block8defd9f2010-07-08 12:39:36 +01002285 // r12 : argv
Steve Blocka7e24c12009-10-30 11:49:00 +00002286#ifdef ENABLE_DEBUGGER_SUPPORT
2287 // Restore the memory copy of the registers by digging them out from
2288 // the stack. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00002289 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002290 // It's okay to clobber register rbx below because we don't need
2291 // the function pointer after this.
2292 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
Steve Blockd0582a62009-12-15 09:54:21 +00002293 int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002294 lea(rbx, Operand(rbp, kOffset));
2295 CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
2296 }
2297#endif
2298
2299 // Get the return address from the stack and restore the frame pointer.
2300 movq(rcx, Operand(rbp, 1 * kPointerSize));
2301 movq(rbp, Operand(rbp, 0 * kPointerSize));
2302
Steve Blocka7e24c12009-10-30 11:49:00 +00002303 // Pop everything up to and including the arguments and the receiver
2304 // from the caller stack.
Steve Block8defd9f2010-07-08 12:39:36 +01002305 lea(rsp, Operand(r12, 1 * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00002306
2307 // Restore current context from top and clear it in debug mode.
2308 ExternalReference context_address(Top::k_context_address);
2309 movq(kScratchRegister, context_address);
2310 movq(rsi, Operand(kScratchRegister, 0));
2311#ifdef DEBUG
2312 movq(Operand(kScratchRegister, 0), Immediate(0));
2313#endif
2314
2315 // Push the return address to get ready to return.
2316 push(rcx);
2317
2318 // Clear the top frame.
2319 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2320 movq(kScratchRegister, c_entry_fp_address);
2321 movq(Operand(kScratchRegister, 0), Immediate(0));
2322}
2323
2324
Steve Block3ce2e202009-11-05 08:53:23 +00002325Register MacroAssembler::CheckMaps(JSObject* object,
2326 Register object_reg,
2327 JSObject* holder,
2328 Register holder_reg,
Steve Blocka7e24c12009-10-30 11:49:00 +00002329 Register scratch,
Steve Block6ded16b2010-05-10 14:33:55 +01002330 int save_at_depth,
Steve Blocka7e24c12009-10-30 11:49:00 +00002331 Label* miss) {
2332 // Make sure there's no overlap between scratch and the other
2333 // registers.
2334 ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));
2335
2336 // Keep track of the current object in register reg. On the first
2337 // iteration, reg is an alias for object_reg, on later iterations,
2338 // it is an alias for holder_reg.
2339 Register reg = object_reg;
Steve Block6ded16b2010-05-10 14:33:55 +01002340 int depth = 0;
2341
2342 if (save_at_depth == depth) {
2343 movq(Operand(rsp, kPointerSize), object_reg);
2344 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002345
2346 // Check the maps in the prototype chain.
2347 // Traverse the prototype chain from the object and do map checks.
2348 while (object != holder) {
2349 depth++;
2350
2351 // Only global objects and objects that do not require access
2352 // checks are allowed in stubs.
2353 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2354
2355 JSObject* prototype = JSObject::cast(object->GetPrototype());
2356 if (Heap::InNewSpace(prototype)) {
2357 // Get the map of the current object.
2358 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2359 Cmp(scratch, Handle<Map>(object->map()));
2360 // Branch on the result of the map check.
2361 j(not_equal, miss);
2362 // Check access rights to the global object. This has to happen
2363 // after the map check so that we know that the object is
2364 // actually a global object.
2365 if (object->IsJSGlobalProxy()) {
2366 CheckAccessGlobalProxy(reg, scratch, miss);
2367
2368 // Restore scratch register to be the map of the object.
2369 // We load the prototype from the map in the scratch register.
2370 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2371 }
2372 // The prototype is in new space; we cannot store a reference
2373 // to it in the code. Load it from the map.
2374 reg = holder_reg; // from now the object is in holder_reg
2375 movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));
2376
2377 } else {
2378 // Check the map of the current object.
2379 Cmp(FieldOperand(reg, HeapObject::kMapOffset),
2380 Handle<Map>(object->map()));
2381 // Branch on the result of the map check.
2382 j(not_equal, miss);
2383 // Check access rights to the global object. This has to happen
2384 // after the map check so that we know that the object is
2385 // actually a global object.
2386 if (object->IsJSGlobalProxy()) {
2387 CheckAccessGlobalProxy(reg, scratch, miss);
2388 }
2389 // The prototype is in old space; load it directly.
2390 reg = holder_reg; // from now the object is in holder_reg
2391 Move(reg, Handle<JSObject>(prototype));
2392 }
2393
Steve Block6ded16b2010-05-10 14:33:55 +01002394 if (save_at_depth == depth) {
2395 movq(Operand(rsp, kPointerSize), reg);
2396 }
2397
Steve Blocka7e24c12009-10-30 11:49:00 +00002398 // Go to the next object in the prototype chain.
2399 object = prototype;
2400 }
2401
2402 // Check the holder map.
Steve Block3ce2e202009-11-05 08:53:23 +00002403 Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002404 j(not_equal, miss);
2405
2406 // Log the check depth.
Steve Block6ded16b2010-05-10 14:33:55 +01002407 LOG(IntEvent("check-maps-depth", depth + 1));
Steve Blocka7e24c12009-10-30 11:49:00 +00002408
2409 // Perform security check for access to the global object and return
2410 // the holder register.
2411 ASSERT(object == holder);
2412 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2413 if (object->IsJSGlobalProxy()) {
2414 CheckAccessGlobalProxy(reg, scratch, miss);
2415 }
2416 return reg;
2417}
2418
2419
Steve Blocka7e24c12009-10-30 11:49:00 +00002420void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2421 Register scratch,
2422 Label* miss) {
2423 Label same_contexts;
2424
2425 ASSERT(!holder_reg.is(scratch));
2426 ASSERT(!scratch.is(kScratchRegister));
2427 // Load current lexical context from the stack frame.
2428 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2429
2430 // When generating debug code, make sure the lexical context is set.
2431 if (FLAG_debug_code) {
2432 cmpq(scratch, Immediate(0));
2433 Check(not_equal, "we should not have an empty lexical context");
2434 }
2435 // Load the global context of the current context.
2436 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2437 movq(scratch, FieldOperand(scratch, offset));
2438 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2439
2440 // Check the context is a global context.
2441 if (FLAG_debug_code) {
2442 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2443 Factory::global_context_map());
2444 Check(equal, "JSGlobalObject::global_context should be a global context.");
2445 }
2446
2447 // Check if both contexts are the same.
2448 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2449 j(equal, &same_contexts);
2450
2451 // Compare security tokens.
2452 // Check that the security token in the calling global object is
2453 // compatible with the security token in the receiving global
2454 // object.
2455
2456 // Check the context is a global context.
2457 if (FLAG_debug_code) {
2458 // Preserve original value of holder_reg.
2459 push(holder_reg);
2460 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2461 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2462 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2463
2464    // Read the first word and compare to global_context_map().
2465 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2466 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2467 Check(equal, "JSGlobalObject::global_context should be a global context.");
2468 pop(holder_reg);
2469 }
2470
2471 movq(kScratchRegister,
2472 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00002473 int token_offset =
2474 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002475 movq(scratch, FieldOperand(scratch, token_offset));
2476 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2477 j(not_equal, miss);
2478
2479 bind(&same_contexts);
2480}
2481
2482
2483void MacroAssembler::LoadAllocationTopHelper(Register result,
2484 Register result_end,
2485 Register scratch,
2486 AllocationFlags flags) {
2487 ExternalReference new_space_allocation_top =
2488 ExternalReference::new_space_allocation_top_address();
2489
2490 // Just return if allocation top is already known.
2491 if ((flags & RESULT_CONTAINS_TOP) != 0) {
2492 // No use of scratch if allocation top is provided.
Steve Block6ded16b2010-05-10 14:33:55 +01002493 ASSERT(!scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00002494#ifdef DEBUG
2495 // Assert that result actually contains top on entry.
2496 movq(kScratchRegister, new_space_allocation_top);
2497 cmpq(result, Operand(kScratchRegister, 0));
2498 Check(equal, "Unexpected allocation top");
2499#endif
2500 return;
2501 }
2502
Steve Block6ded16b2010-05-10 14:33:55 +01002503 // Move address of new object to result. Use scratch register if available,
2504 // and keep address in scratch until call to UpdateAllocationTopHelper.
2505 if (scratch.is_valid()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002506 ASSERT(!scratch.is(result_end));
2507 movq(scratch, new_space_allocation_top);
2508 movq(result, Operand(scratch, 0));
Steve Block6ded16b2010-05-10 14:33:55 +01002509 } else if (result.is(rax)) {
2510 load_rax(new_space_allocation_top);
2511 } else {
2512 movq(kScratchRegister, new_space_allocation_top);
2513 movq(result, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002514 }
2515}
2516
2517
2518void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2519 Register scratch) {
Steve Blockd0582a62009-12-15 09:54:21 +00002520 if (FLAG_debug_code) {
2521 testq(result_end, Immediate(kObjectAlignmentMask));
2522 Check(zero, "Unaligned allocation in new space");
2523 }
2524
Steve Blocka7e24c12009-10-30 11:49:00 +00002525 ExternalReference new_space_allocation_top =
2526 ExternalReference::new_space_allocation_top_address();
2527
2528 // Update new top.
2529 if (result_end.is(rax)) {
2530 // rax can be stored directly to a memory location.
2531 store_rax(new_space_allocation_top);
2532 } else {
2533 // Register required - use scratch provided if available.
Steve Block6ded16b2010-05-10 14:33:55 +01002534 if (scratch.is_valid()) {
2535 movq(Operand(scratch, 0), result_end);
2536 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +00002537 movq(kScratchRegister, new_space_allocation_top);
2538 movq(Operand(kScratchRegister, 0), result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00002539 }
2540 }
2541}
2542
2543
2544void MacroAssembler::AllocateInNewSpace(int object_size,
2545 Register result,
2546 Register result_end,
2547 Register scratch,
2548 Label* gc_required,
2549 AllocationFlags flags) {
2550 ASSERT(!result.is(result_end));
2551
2552 // Load address of new object into result.
2553 LoadAllocationTopHelper(result, result_end, scratch, flags);
2554
2555 // Calculate new top and bail out if new space is exhausted.
2556 ExternalReference new_space_allocation_limit =
2557 ExternalReference::new_space_allocation_limit_address();
Steve Block6ded16b2010-05-10 14:33:55 +01002558
2559 Register top_reg = result_end.is_valid() ? result_end : result;
2560
2561 if (top_reg.is(result)) {
2562 addq(top_reg, Immediate(object_size));
2563 } else {
2564 lea(top_reg, Operand(result, object_size));
2565 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002566 movq(kScratchRegister, new_space_allocation_limit);
Steve Block6ded16b2010-05-10 14:33:55 +01002567 cmpq(top_reg, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002568 j(above, gc_required);
2569
2570 // Update allocation top.
Steve Block6ded16b2010-05-10 14:33:55 +01002571 UpdateAllocationTopHelper(top_reg, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00002572
Steve Block6ded16b2010-05-10 14:33:55 +01002573 if (top_reg.is(result)) {
2574 if ((flags & TAG_OBJECT) != 0) {
2575 subq(result, Immediate(object_size - kHeapObjectTag));
2576 } else {
2577 subq(result, Immediate(object_size));
2578 }
2579 } else if ((flags & TAG_OBJECT) != 0) {
2580 // Tag the result if requested.
Steve Blocka7e24c12009-10-30 11:49:00 +00002581 addq(result, Immediate(kHeapObjectTag));
2582 }
2583}
2584
2585
2586void MacroAssembler::AllocateInNewSpace(int header_size,
2587 ScaleFactor element_size,
2588 Register element_count,
2589 Register result,
2590 Register result_end,
2591 Register scratch,
2592 Label* gc_required,
2593 AllocationFlags flags) {
2594 ASSERT(!result.is(result_end));
2595
2596 // Load address of new object into result.
2597 LoadAllocationTopHelper(result, result_end, scratch, flags);
2598
2599 // Calculate new top and bail out if new space is exhausted.
2600 ExternalReference new_space_allocation_limit =
2601 ExternalReference::new_space_allocation_limit_address();
2602 lea(result_end, Operand(result, element_count, element_size, header_size));
2603 movq(kScratchRegister, new_space_allocation_limit);
2604 cmpq(result_end, Operand(kScratchRegister, 0));
2605 j(above, gc_required);
2606
2607 // Update allocation top.
2608 UpdateAllocationTopHelper(result_end, scratch);
2609
2610 // Tag the result if requested.
2611 if ((flags & TAG_OBJECT) != 0) {
2612 addq(result, Immediate(kHeapObjectTag));
2613 }
2614}
2615
2616
2617void MacroAssembler::AllocateInNewSpace(Register object_size,
2618 Register result,
2619 Register result_end,
2620 Register scratch,
2621 Label* gc_required,
2622 AllocationFlags flags) {
2623 // Load address of new object into result.
2624 LoadAllocationTopHelper(result, result_end, scratch, flags);
2625
2626 // Calculate new top and bail out if new space is exhausted.
2627 ExternalReference new_space_allocation_limit =
2628 ExternalReference::new_space_allocation_limit_address();
2629 if (!object_size.is(result_end)) {
2630 movq(result_end, object_size);
2631 }
2632 addq(result_end, result);
2633 movq(kScratchRegister, new_space_allocation_limit);
2634 cmpq(result_end, Operand(kScratchRegister, 0));
2635 j(above, gc_required);
2636
2637 // Update allocation top.
2638 UpdateAllocationTopHelper(result_end, scratch);
2639
2640 // Tag the result if requested.
2641 if ((flags & TAG_OBJECT) != 0) {
2642 addq(result, Immediate(kHeapObjectTag));
2643 }
2644}
2645
2646
2647void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2648 ExternalReference new_space_allocation_top =
2649 ExternalReference::new_space_allocation_top_address();
2650
2651 // Make sure the object has no tag before resetting top.
2652 and_(object, Immediate(~kHeapObjectTagMask));
2653 movq(kScratchRegister, new_space_allocation_top);
2654#ifdef DEBUG
2655 cmpq(object, Operand(kScratchRegister, 0));
2656 Check(below, "Undo allocation of non allocated memory");
2657#endif
2658 movq(Operand(kScratchRegister, 0), object);
2659}
2660
2661
Steve Block3ce2e202009-11-05 08:53:23 +00002662void MacroAssembler::AllocateHeapNumber(Register result,
2663 Register scratch,
2664 Label* gc_required) {
2665 // Allocate heap number in new space.
2666 AllocateInNewSpace(HeapNumber::kSize,
2667 result,
2668 scratch,
2669 no_reg,
2670 gc_required,
2671 TAG_OBJECT);
2672
2673 // Set the map.
2674 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2675 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2676}
2677
2678
Leon Clarkee46be812010-01-19 14:06:41 +00002679void MacroAssembler::AllocateTwoByteString(Register result,
2680 Register length,
2681 Register scratch1,
2682 Register scratch2,
2683 Register scratch3,
2684 Label* gc_required) {
2685 // Calculate the number of bytes needed for the characters in the string while
2686 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002687 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
2688 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002689 ASSERT(kShortSize == 2);
2690  // scratch1 = length * 2 + kObjectAlignmentMask + kHeaderAlignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002691 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
2692 kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002693 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002694 if (kHeaderAlignment > 0) {
2695 subq(scratch1, Immediate(kHeaderAlignment));
2696 }
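  // scratch1 now holds the size of the character data, padded so that
  // SeqTwoByteString::kHeaderSize + scratch1 is a multiple of the object
  // alignment.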
Leon Clarkee46be812010-01-19 14:06:41 +00002697
2698 // Allocate two byte string in new space.
2699 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2700 times_1,
2701 scratch1,
2702 result,
2703 scratch2,
2704 scratch3,
2705 gc_required,
2706 TAG_OBJECT);
2707
2708 // Set the map, length and hash field.
2709 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2710 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002711 Integer32ToSmi(scratch1, length);
2712 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002713 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00002714 Immediate(String::kEmptyHashField));
2715}
2716
2717
2718void MacroAssembler::AllocateAsciiString(Register result,
2719 Register length,
2720 Register scratch1,
2721 Register scratch2,
2722 Register scratch3,
2723 Label* gc_required) {
2724 // Calculate the number of bytes needed for the characters in the string while
2725 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002726 const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
2727 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002728 movl(scratch1, length);
2729 ASSERT(kCharSize == 1);
Steve Block6ded16b2010-05-10 14:33:55 +01002730 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002731 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002732 if (kHeaderAlignment > 0) {
2733 subq(scratch1, Immediate(kHeaderAlignment));
2734 }
Leon Clarkee46be812010-01-19 14:06:41 +00002735
2736 // Allocate ascii string in new space.
2737 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2738 times_1,
2739 scratch1,
2740 result,
2741 scratch2,
2742 scratch3,
2743 gc_required,
2744 TAG_OBJECT);
2745
2746 // Set the map, length and hash field.
2747 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2748 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002749 Integer32ToSmi(scratch1, length);
2750 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002751 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00002752 Immediate(String::kEmptyHashField));
2753}
2754
2755
2756void MacroAssembler::AllocateConsString(Register result,
2757 Register scratch1,
2758 Register scratch2,
2759 Label* gc_required) {
2760 // Allocate heap number in new space.
2761  // Allocate the cons string object in new space.
2762 result,
2763 scratch1,
2764 scratch2,
2765 gc_required,
2766 TAG_OBJECT);
2767
2768 // Set the map. The other fields are left uninitialized.
2769 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2770 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2771}
2772
2773
2774void MacroAssembler::AllocateAsciiConsString(Register result,
2775 Register scratch1,
2776 Register scratch2,
2777 Label* gc_required) {
2778  // Allocate the ascii cons string object in new space.
2779 AllocateInNewSpace(ConsString::kSize,
2780 result,
2781 scratch1,
2782 scratch2,
2783 gc_required,
2784 TAG_OBJECT);
2785
2786 // Set the map. The other fields are left uninitialized.
2787 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2788 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2789}
2790
2791
Steve Blockd0582a62009-12-15 09:54:21 +00002792void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2793 if (context_chain_length > 0) {
2794 // Move up the chain of contexts to the context containing the slot.
2795 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2796 // Load the function context (which is the incoming, outer context).
Leon Clarkee46be812010-01-19 14:06:41 +00002797 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00002798 for (int i = 1; i < context_chain_length; i++) {
2799 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2800 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2801 }
2802 // The context may be an intermediate context, not a function context.
2803 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2804 } else { // context is the current function context.
2805 // The context may be an intermediate context, not a function context.
2806 movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2807 }
2808}
2809
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002810
Leon Clarke4515c472010-02-03 11:58:03 +00002811int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002812 // On Windows 64 stack slots are reserved by the caller for all arguments
2813 // including the ones passed in registers, and space is always allocated for
2814 // the four register arguments even if the function takes fewer than four
2815 // arguments.
2816 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2817 // and the caller does not reserve stack slots for them.
Leon Clarke4515c472010-02-03 11:58:03 +00002818 ASSERT(num_arguments >= 0);
2819#ifdef _WIN64
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002820 static const int kMinimumStackSlots = 4;
2821 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2822 return num_arguments;
Leon Clarke4515c472010-02-03 11:58:03 +00002823#else
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002824 static const int kRegisterPassedArguments = 6;
2825 if (num_arguments < kRegisterPassedArguments) return 0;
2826 return num_arguments - kRegisterPassedArguments;
Leon Clarke4515c472010-02-03 11:58:03 +00002827#endif
Leon Clarke4515c472010-02-03 11:58:03 +00002828}
2829
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002830
Leon Clarke4515c472010-02-03 11:58:03 +00002831void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2832 int frame_alignment = OS::ActivationFrameAlignment();
2833 ASSERT(frame_alignment != 0);
2834 ASSERT(num_arguments >= 0);
2835 // Make stack end at alignment and allocate space for arguments and old rsp.
2836 movq(kScratchRegister, rsp);
2837 ASSERT(IsPowerOf2(frame_alignment));
2838 int argument_slots_on_stack =
2839 ArgumentStackSlotsForCFunctionCall(num_arguments);
2840 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2841 and_(rsp, Immediate(-frame_alignment));
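  // Store the original rsp (saved in kScratchRegister above) in the slot just
  // past the argument area; CallCFunction reloads it from there to tear the
  // aligned frame down.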
2842 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2843}
2844
2845
2846void MacroAssembler::CallCFunction(ExternalReference function,
2847 int num_arguments) {
2848 movq(rax, function);
2849 CallCFunction(rax, num_arguments);
2850}
2851
2852
2853void MacroAssembler::CallCFunction(Register function, int num_arguments) {
Steve Block6ded16b2010-05-10 14:33:55 +01002854 // Check stack alignment.
2855 if (FLAG_debug_code) {
2856 CheckStackAlignment();
2857 }
2858
Leon Clarke4515c472010-02-03 11:58:03 +00002859 call(function);
2860 ASSERT(OS::ActivationFrameAlignment() != 0);
2861 ASSERT(num_arguments >= 0);
2862 int argument_slots_on_stack =
2863 ArgumentStackSlotsForCFunctionCall(num_arguments);
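  // Restore the rsp value that PrepareCallCFunction stored above the argument
  // slots.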
2864 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2865}
2866
Steve Blockd0582a62009-12-15 09:54:21 +00002867
Steve Blocka7e24c12009-10-30 11:49:00 +00002868CodePatcher::CodePatcher(byte* address, int size)
2869 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2870 // Create a new macro assembler pointing to the address of the code to patch.
2871  // The size is adjusted with kGap in order for the assembler to generate size
2872 // bytes of instructions without failing with buffer size constraints.
2873 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2874}
2875
2876
2877CodePatcher::~CodePatcher() {
2878 // Indicate that code has changed.
2879 CPU::FlushICache(address_, size_);
2880
2881 // Check that the code was patched as expected.
2882 ASSERT(masm_.pc_ == address_ + size_);
2883 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2884}
2885
Steve Blocka7e24c12009-10-30 11:49:00 +00002886} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01002887
2888#endif // V8_TARGET_ARCH_X64