// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


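// Write-barrier helper: records a store into 'object' at address 'addr' by
// setting the dirty mark of the page region containing 'addr'. The page
// start is recovered by masking the object pointer, the region number is
// derived from the address, and bts sets the corresponding dirty bit.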
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr. See the
  // Page::GetRegionNumberForAddress method for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));

  // Set the dirty mark for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


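// An object is in new space iff (object - new_space_start) & new_space_mask
// is zero, i.e. the pointer lies inside the aligned new-space reservation.
// The serializer-safe path computes the same predicate via external
// references; the caller's condition code (equal / not_equal) selects
// whether to branch when the object is, or is not, in new space.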
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PushHandleScope(Register scratch) {
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  const int kExtensionsOffset = 0;
  const int kNextOffset = Offset(
      ExternalReference::handle_scope_next_address(),
      extensions_address);
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      extensions_address);

  // Push the number of extensions, smi-tagged so the gc will ignore it.
  movq(kScratchRegister, extensions_address);
  movq(scratch, Operand(kScratchRegister, kExtensionsOffset));
  movq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0));
  Integer32ToSmi(scratch, scratch);
  push(scratch);
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  push(Operand(kScratchRegister, kNextOffset));
  push(Operand(kScratchRegister, kLimitOffset));
}


Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  const int kExtensionsOffset = 0;
  const int kNextOffset = Offset(
      ExternalReference::handle_scope_next_address(),
      extensions_address);
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      extensions_address);

  Object* result = NULL;
  Label write_back;
  movq(kScratchRegister, extensions_address);
  cmpq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0));
  j(equal, &write_back);
  push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  pop(saved);
  movq(kScratchRegister, extensions_address);

  bind(&write_back);
  pop(Operand(kScratchRegister, kLimitOffset));
  pop(Operand(kScratchRegister, kNextOffset));
  pop(scratch);
  SmiToInteger32(scratch, scratch);
  movq(Operand(kScratchRegister, kExtensionsOffset), scratch);

  return result;
}


void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}


Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));

  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  movq(rdi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  movq(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    movq(target, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    movq(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmpq(target, Operand(rsp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  lea(target, FieldOperand(target, Code::kHeaderSize));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

static int kSmiShift = kSmiTagSize + kSmiShiftSize;
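// With the x64 settings used here (kSmiTagSize + kSmiShiftSize == 32), a smi
// keeps its 32-bit payload in the upper half of the word and its lower 32
// bits, including the tag, are zero. For example, Smi::FromInt(3) is the
// word 0x0000000300000000.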

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

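// Loads the smi constant 'source' into 'dst' without a full 64-bit immediate
// where possible: kSmiConstantRegister is expected to hold
// Smi::FromInt(kSmiConstantRegisterValue) (1, given the early return in
// GetSmiConstant above), so small magnitudes (2, 3, 4, 5, 8, 9) can be formed
// with a single lea using the available scale factors, and negative values
// by negating afterwards.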
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (FLAG_debug_code) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      Label ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  if (source->value() == 0) {
    xorl(dst, dst);
    return;
  }
  int value = source->value();
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (FLAG_debug_code) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


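// Rotating left by one moves the sign bit into bit 0 and the smi tag bit
// into bit 1, so testing the low two bits of the rotated value checks
// "is a smi" and "is non-negative" at once.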
Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Make mask 0x8000000000000001 and test that both bits are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}


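// A smi has all of its low 32 bits clear, while a heap object pointer has
// its low two bits equal to 01 (kHeapObjectTag with kHeapObjectTagMask == 3).
// Adding the two values therefore yields low bits 00 only if both are smis;
// a single test of the low two bits of the sum covers both registers.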
Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible.
    if (dst.is(src1)) {
      addq(dst, src2);
    } else {
      movq(dst, src1);
      addq(dst, src2);
    }
    Assert(no_overflow, "Smi addition overflow");
  } else if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    cmpq(dst, src2);
    j(overflow, on_not_smi_result);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result) {
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}

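// Note on the zero checks below: a zero product with one negative operand
// represents -0, which has no smi encoding, so that case is sent to
// on_not_smi_result as well.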
void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


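// idivl divides the 64-bit dividend in edx:eax by a 32-bit divisor, which is
// why rax and rdx are reserved here: src1 is untagged into rax, cdq
// sign-extends it into edx, and the quotient/remainder come back in eax/edx.
// Dividing Smi::kMinValue by -1 would overflow idiv, so that case is routed
// to the slow path.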
void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


1457void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
1458 Register src,
1459 int shift_value,
1460 Label* on_not_smi_result) {
1461  // Logical right shift interprets its result as an *unsigned* number.
1462 if (dst.is(src)) {
1463 UNIMPLEMENTED(); // Not used.
1464 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001465 movq(dst, src);
1466 if (shift_value == 0) {
1467 testq(dst, dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001468 j(negative, on_not_smi_result);
1469 }
Steve Block3ce2e202009-11-05 08:53:23 +00001470 shr(dst, Immediate(shift_value + kSmiShift));
1471 shl(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001472 }
1473}
1474
1475
1476void MacroAssembler::SmiShiftLeftConstant(Register dst,
1477 Register src,
Kristian Monsen25f61362010-05-21 11:50:48 +01001478 int shift_value) {
Steve Block3ce2e202009-11-05 08:53:23 +00001479 if (!dst.is(src)) {
1480 movq(dst, src);
1481 }
1482 if (shift_value > 0) {
1483 shl(dst, Immediate(shift_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001484 }
1485}
1486
1487
1488void MacroAssembler::SmiShiftLeft(Register dst,
1489 Register src1,
Kristian Monsen25f61362010-05-21 11:50:48 +01001490 Register src2) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001491 ASSERT(!dst.is(rcx));
1492 Label result_ok;
Steve Block3ce2e202009-11-05 08:53:23 +00001493 // Untag shift amount.
1494 if (!dst.is(src1)) {
1495 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001496 }
Steve Block3ce2e202009-11-05 08:53:23 +00001497 SmiToInteger32(rcx, src2);
1498  // The shift amount is specified by the lower 5 bits, not six as in the shl opcode.
1499 and_(rcx, Immediate(0x1f));
Steve Blockd0582a62009-12-15 09:54:21 +00001500 shl_cl(dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001501}
1502
1503
1504void MacroAssembler::SmiShiftLogicalRight(Register dst,
1505 Register src1,
1506 Register src2,
1507 Label* on_not_smi_result) {
Steve Block3ce2e202009-11-05 08:53:23 +00001508 ASSERT(!dst.is(kScratchRegister));
1509 ASSERT(!src1.is(kScratchRegister));
1510 ASSERT(!src2.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001511 ASSERT(!dst.is(rcx));
1512 Label result_ok;
Steve Block3ce2e202009-11-05 08:53:23 +00001513 if (src1.is(rcx) || src2.is(rcx)) {
1514 movq(kScratchRegister, rcx);
Steve Blocka7e24c12009-10-30 11:49:00 +00001515 }
Steve Block3ce2e202009-11-05 08:53:23 +00001516 if (!dst.is(src1)) {
1517 movq(dst, src1);
1518 }
1519 SmiToInteger32(rcx, src2);
1520 orl(rcx, Immediate(kSmiShift));
Steve Blockd0582a62009-12-15 09:54:21 +00001521  shr_cl(dst);  // Shift is (rcx & 0x1f) + 32.
Steve Block3ce2e202009-11-05 08:53:23 +00001522 shl(dst, Immediate(kSmiShift));
1523 testq(dst, dst);
1524 if (src1.is(rcx) || src2.is(rcx)) {
1525 Label positive_result;
1526 j(positive, &positive_result);
1527 if (src1.is(rcx)) {
1528 movq(src1, kScratchRegister);
1529 } else {
1530 movq(src2, kScratchRegister);
1531 }
1532 jmp(on_not_smi_result);
1533 bind(&positive_result);
1534 } else {
1535 j(negative, on_not_smi_result); // src2 was zero and src1 negative.
1536 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001537}
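// Note on the sequence above, assuming kSmiShift == 32: or-ing 32 into the
// untagged shift count lets a single shr_cl both drop the 32 tag bits and
// perform the requested logical shift, and the following shl re-tags the
// 32-bit result. The testq then rejects the one outcome that is not a valid
// smi: a zero shift of a negative input, whose unsigned reinterpretation has
// bit 31 set.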
1538
1539
1540void MacroAssembler::SmiShiftArithmeticRight(Register dst,
1541 Register src1,
1542 Register src2) {
Steve Block3ce2e202009-11-05 08:53:23 +00001543 ASSERT(!dst.is(kScratchRegister));
1544 ASSERT(!src1.is(kScratchRegister));
1545 ASSERT(!src2.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001546 ASSERT(!dst.is(rcx));
Steve Block3ce2e202009-11-05 08:53:23 +00001547 if (src1.is(rcx)) {
1548 movq(kScratchRegister, src1);
1549 } else if (src2.is(rcx)) {
1550 movq(kScratchRegister, src2);
1551 }
1552 if (!dst.is(src1)) {
1553 movq(dst, src1);
1554 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001555 SmiToInteger32(rcx, src2);
Steve Block3ce2e202009-11-05 08:53:23 +00001556 orl(rcx, Immediate(kSmiShift));
Steve Blockd0582a62009-12-15 09:54:21 +00001557  sar_cl(dst);  // Shift by 32 + (original rcx & 0x1f).
Steve Block3ce2e202009-11-05 08:53:23 +00001558 shl(dst, Immediate(kSmiShift));
1559 if (src1.is(rcx)) {
1560 movq(src1, kScratchRegister);
1561 } else if (src2.is(rcx)) {
1562 movq(src2, kScratchRegister);
1563 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001564}
1565
1566
1567void MacroAssembler::SelectNonSmi(Register dst,
1568 Register src1,
1569 Register src2,
1570 Label* on_not_smis) {
Steve Block3ce2e202009-11-05 08:53:23 +00001571 ASSERT(!dst.is(kScratchRegister));
1572 ASSERT(!src1.is(kScratchRegister));
1573 ASSERT(!src2.is(kScratchRegister));
Steve Blocka7e24c12009-10-30 11:49:00 +00001574 ASSERT(!dst.is(src1));
1575 ASSERT(!dst.is(src2));
1576  // The operands must not both be smis (at least one must be a heap object).
1577#ifdef DEBUG
Steve Block3ce2e202009-11-05 08:53:23 +00001578 if (allow_stub_calls()) { // Check contains a stub call.
1579 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
1580 Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
1581 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001582#endif
1583 ASSERT_EQ(0, kSmiTag);
1584 ASSERT_EQ(0, Smi::FromInt(0));
Steve Block3ce2e202009-11-05 08:53:23 +00001585 movl(kScratchRegister, Immediate(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001586 and_(kScratchRegister, src1);
1587 testl(kScratchRegister, src2);
Steve Block3ce2e202009-11-05 08:53:23 +00001588  // If non-zero, then neither operand is a smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00001589 j(not_zero, on_not_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +00001590
Steve Block3ce2e202009-11-05 08:53:23 +00001591 // Exactly one operand is a smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00001592 ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
1593  // kScratchRegister still holds src1 & kSmiTagMask, which is either zero or one.
1594 subq(kScratchRegister, Immediate(1));
1595 // If src1 is a smi, then scratch register all 1s, else it is all 0s.
1596 movq(dst, src1);
1597 xor_(dst, src2);
1598 and_(dst, kScratchRegister);
1599 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
1600 xor_(dst, src1);
Steve Block3ce2e202009-11-05 08:53:23 +00001601 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00001602}
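// Worked example of the selection trick, relying on the asserts above
// (kSmiTag == 0, kSmiTagMask == 1): if src1 is the smi, (src1 & 1) - 1
// underflows to all ones, so dst = ((src1 ^ src2) & ~0) ^ src1 == src2; if
// src1 is the heap object, the mask is zero and dst = 0 ^ src1 == src1.
// Either way dst receives the operand that is not a smi.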
1603
Steve Block8defd9f2010-07-08 12:39:36 +01001604
Steve Block3ce2e202009-11-05 08:53:23 +00001605SmiIndex MacroAssembler::SmiToIndex(Register dst,
1606 Register src,
1607 int shift) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001608 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001609 // There is a possible optimization if shift is in the range 60-63, but that
1610 // will (and must) never happen.
1611 if (!dst.is(src)) {
1612 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001613 }
Steve Block3ce2e202009-11-05 08:53:23 +00001614 if (shift < kSmiShift) {
1615 sar(dst, Immediate(kSmiShift - shift));
1616 } else {
1617 shl(dst, Immediate(shift - kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001618 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001619 return SmiIndex(dst, times_1);
1620}
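// Example, assuming kSmiShift == 32: for shift == 3 the smi encoding of 5
// (5 << 32) is arithmetically shifted right by 32 - 3 = 29, leaving 40 in
// dst, i.e. the untagged value already multiplied by 8 and usable with
// times_1 addressing.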
1621
Steve Blocka7e24c12009-10-30 11:49:00 +00001622SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1623 Register src,
1624 int shift) {
1625 // Register src holds a positive smi.
1626 ASSERT(is_uint6(shift));
Steve Block3ce2e202009-11-05 08:53:23 +00001627 if (!dst.is(src)) {
1628 movq(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001629 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001630 neg(dst);
Steve Block3ce2e202009-11-05 08:53:23 +00001631 if (shift < kSmiShift) {
1632 sar(dst, Immediate(kSmiShift - shift));
1633 } else {
1634 shl(dst, Immediate(shift - kSmiShift));
1635 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001636 return SmiIndex(dst, times_1);
1637}
1638
1639
Steve Block3ce2e202009-11-05 08:53:23 +00001640void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
1641 ASSERT_EQ(0, kSmiTag);
1642 Condition smi = CheckSmi(src);
1643 j(smi, on_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00001644}
1645
Steve Block3ce2e202009-11-05 08:53:23 +00001646
1647void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
1648 Condition smi = CheckSmi(src);
1649 j(NegateCondition(smi), on_not_smi);
1650}
1651
1652
1653void MacroAssembler::JumpIfNotPositiveSmi(Register src,
1654 Label* on_not_positive_smi) {
1655 Condition positive_smi = CheckPositiveSmi(src);
1656 j(NegateCondition(positive_smi), on_not_positive_smi);
1657}
1658
1659
1660void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1661 Smi* constant,
1662 Label* on_equals) {
1663 SmiCompare(src, constant);
1664 j(equal, on_equals);
1665}
1666
1667
1668void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
1669 Condition is_valid = CheckInteger32ValidSmiValue(src);
1670 j(NegateCondition(is_valid), on_invalid);
1671}
1672
1673
1674void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1675 Label* on_invalid) {
1676 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1677 j(NegateCondition(is_valid), on_invalid);
1678}
1679
1680
1681void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
1682 Label* on_not_both_smi) {
1683 Condition both_smi = CheckBothSmi(src1, src2);
1684 j(NegateCondition(both_smi), on_not_both_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00001685}
1686
1687
Leon Clarked91b9f72010-01-27 17:25:45 +00001688void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
1689 Label* on_not_both_smi) {
1690 Condition both_smi = CheckBothPositiveSmi(src1, src2);
1691 j(NegateCondition(both_smi), on_not_both_smi);
1692}
1693
1694
Leon Clarkee46be812010-01-19 14:06:41 +00001696void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
1697 Register second_object,
1698 Register scratch1,
1699 Register scratch2,
1700 Label* on_fail) {
1701 // Check that both objects are not smis.
1702 Condition either_smi = CheckEitherSmi(first_object, second_object);
1703 j(either_smi, on_fail);
1704
1705 // Load instance type for both strings.
1706 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
1707 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
1708 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
1709 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
1710
1711 // Check that both are flat ascii strings.
1712 ASSERT(kNotStringTag != 0);
1713 const int kFlatAsciiStringMask =
1714 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
Leon Clarked91b9f72010-01-27 17:25:45 +00001715 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
Leon Clarkee46be812010-01-19 14:06:41 +00001716
1717 andl(scratch1, Immediate(kFlatAsciiStringMask));
1718 andl(scratch2, Immediate(kFlatAsciiStringMask));
1719 // Interleave the bits to check both scratch1 and scratch2 in one test.
1720 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1721 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1722 cmpl(scratch1,
Leon Clarked91b9f72010-01-27 17:25:45 +00001723 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
Leon Clarkee46be812010-01-19 14:06:41 +00001724 j(not_equal, on_fail);
1725}
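// The lea above packs both masked instance types into one register as
// scratch1 + scratch2 * 8; because kFlatAsciiStringMask has no bits in common
// with itself shifted left by three (see the ASSERT), the two fields cannot
// carry into each other, so one cmpl against
// kFlatAsciiStringTag + (kFlatAsciiStringTag << 3) checks both strings.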
1726
1727
Steve Block6ded16b2010-05-10 14:33:55 +01001728void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1729 Register instance_type,
1730 Register scratch,
1731 Label *failure) {
1732 if (!scratch.is(instance_type)) {
1733 movl(scratch, instance_type);
1734 }
1735
1736 const int kFlatAsciiStringMask =
1737 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1738
1739 andl(scratch, Immediate(kFlatAsciiStringMask));
1740 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1741 j(not_equal, failure);
1742}
1743
1744
1745void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1746 Register first_object_instance_type,
1747 Register second_object_instance_type,
1748 Register scratch1,
1749 Register scratch2,
1750 Label* on_fail) {
1751 // Load instance type for both strings.
1752 movq(scratch1, first_object_instance_type);
1753 movq(scratch2, second_object_instance_type);
1754
1755 // Check that both are flat ascii strings.
1756 ASSERT(kNotStringTag != 0);
1757 const int kFlatAsciiStringMask =
1758 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1759 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1760
1761 andl(scratch1, Immediate(kFlatAsciiStringMask));
1762 andl(scratch2, Immediate(kFlatAsciiStringMask));
1763 // Interleave the bits to check both scratch1 and scratch2 in one test.
1764 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1765 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1766 cmpl(scratch1,
1767 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1768 j(not_equal, on_fail);
1769}
1770
1771
Steve Blocka7e24c12009-10-30 11:49:00 +00001772void MacroAssembler::Move(Register dst, Handle<Object> source) {
1773 ASSERT(!source->IsFailure());
1774 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001775 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001776 } else {
1777 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1778 }
1779}
1780
1781
1782void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001783 ASSERT(!source->IsFailure());
Steve Blocka7e24c12009-10-30 11:49:00 +00001784 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001785 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001786 } else {
1787 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1788 movq(dst, kScratchRegister);
1789 }
1790}
1791
1792
1793void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001794 if (source->IsSmi()) {
1795 SmiCompare(dst, Smi::cast(*source));
1796 } else {
1797 Move(kScratchRegister, source);
1798 cmpq(dst, kScratchRegister);
1799 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001800}
1801
1802
1803void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1804 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001805 SmiCompare(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001806 } else {
1807 ASSERT(source->IsHeapObject());
1808 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1809 cmpq(dst, kScratchRegister);
1810 }
1811}
1812
1813
1814void MacroAssembler::Push(Handle<Object> source) {
1815 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001816 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001817 } else {
1818 ASSERT(source->IsHeapObject());
1819 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1820 push(kScratchRegister);
1821 }
1822}
1823
1824
1825void MacroAssembler::Push(Smi* source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001826 intptr_t smi = reinterpret_cast<intptr_t>(source);
1827 if (is_int32(smi)) {
1828 push(Immediate(static_cast<int32_t>(smi)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001829 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001830 Register constant = GetSmiConstant(source);
1831 push(constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001832 }
1833}
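// Example of why the is_int32 check above is needed, assuming the smi value
// lives in the upper 32 bits of the word: Smi::FromInt(0) encodes as 0 and
// fits a 32-bit push immediate, while Smi::FromInt(1) encodes as 1 << 32 and
// does not, so it has to be materialized via GetSmiConstant and pushed from a
// register.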
1834
1835
Leon Clarkee46be812010-01-19 14:06:41 +00001836void MacroAssembler::Drop(int stack_elements) {
1837 if (stack_elements > 0) {
1838 addq(rsp, Immediate(stack_elements * kPointerSize));
1839 }
1840}
1841
1842
Steve Block3ce2e202009-11-05 08:53:23 +00001843void MacroAssembler::Test(const Operand& src, Smi* source) {
Leon Clarkef7060e22010-06-03 12:02:55 +01001844 testl(Operand(src, kIntSize), Immediate(source->value()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001845}
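// Note, assuming the smi value occupies the upper 32 bits: the kIntSize
// displacement makes the testl read only the high half of the tagged word,
// so the 32-bit immediate is tested directly against the untagged value.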
1846
1847
1848void MacroAssembler::Jump(ExternalReference ext) {
1849 movq(kScratchRegister, ext);
1850 jmp(kScratchRegister);
1851}
1852
1853
1854void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1855 movq(kScratchRegister, destination, rmode);
1856 jmp(kScratchRegister);
1857}
1858
1859
1860void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
Steve Block3ce2e202009-11-05 08:53:23 +00001861 // TODO(X64): Inline this
1862 jmp(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001863}
1864
1865
1866void MacroAssembler::Call(ExternalReference ext) {
1867 movq(kScratchRegister, ext);
1868 call(kScratchRegister);
1869}
1870
1871
1872void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1873 movq(kScratchRegister, destination, rmode);
1874 call(kScratchRegister);
1875}
1876
1877
1878void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1879 ASSERT(RelocInfo::IsCodeTarget(rmode));
1880 WriteRecordedPositions();
Steve Block3ce2e202009-11-05 08:53:23 +00001881 call(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001882}
1883
1884
1885void MacroAssembler::PushTryHandler(CodeLocation try_location,
1886 HandlerType type) {
1887 // Adjust this code if not the case.
1888 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1889
1890 // The pc (return address) is already on TOS. This code pushes state,
1891 // frame pointer and current handler. Check that they are expected
1892 // next on the stack, in that order.
1893 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1894 StackHandlerConstants::kPCOffset - kPointerSize);
1895 ASSERT_EQ(StackHandlerConstants::kFPOffset,
1896 StackHandlerConstants::kStateOffset - kPointerSize);
1897 ASSERT_EQ(StackHandlerConstants::kNextOffset,
1898 StackHandlerConstants::kFPOffset - kPointerSize);
1899
1900 if (try_location == IN_JAVASCRIPT) {
1901 if (type == TRY_CATCH_HANDLER) {
1902 push(Immediate(StackHandler::TRY_CATCH));
1903 } else {
1904 push(Immediate(StackHandler::TRY_FINALLY));
1905 }
1906 push(rbp);
1907 } else {
1908 ASSERT(try_location == IN_JS_ENTRY);
1909 // The frame pointer does not point to a JS frame so we save NULL
1910 // for rbp. We expect the code throwing an exception to check rbp
1911 // before dereferencing it to restore the context.
1912 push(Immediate(StackHandler::ENTRY));
1913 push(Immediate(0)); // NULL frame pointer.
1914 }
1915 // Save the current handler.
1916 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1917 push(Operand(kScratchRegister, 0));
1918 // Link this handler.
1919 movq(Operand(kScratchRegister, 0), rsp);
1920}
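// Resulting handler layout, from higher to lower addresses (matching the
// StackHandlerConstants asserts above): return address, state, frame pointer
// (or 0 for JS entry frames), then the link to the previous handler; rsp
// points at this link and is itself stored into Top::k_handler_address as the
// new top handler.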
1921
1922
Leon Clarkee46be812010-01-19 14:06:41 +00001923void MacroAssembler::PopTryHandler() {
1924 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1925 // Unlink this handler.
1926 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1927 pop(Operand(kScratchRegister, 0));
1928 // Remove the remaining fields.
1929 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1930}
1931
1932
Steve Blocka7e24c12009-10-30 11:49:00 +00001933void MacroAssembler::Ret() {
1934 ret(0);
1935}
1936
1937
1938void MacroAssembler::FCmp() {
Steve Block3ce2e202009-11-05 08:53:23 +00001939 fucomip();
Steve Block8defd9f2010-07-08 12:39:36 +01001940 fstp(0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001941}
1942
1943
1944void MacroAssembler::CmpObjectType(Register heap_object,
1945 InstanceType type,
1946 Register map) {
1947 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1948 CmpInstanceType(map, type);
1949}
1950
1951
1952void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1953 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1954 Immediate(static_cast<int8_t>(type)));
1955}
1956
1957
Andrei Popescu31002712010-02-23 13:46:05 +00001958void MacroAssembler::CheckMap(Register obj,
1959 Handle<Map> map,
1960 Label* fail,
1961 bool is_heap_object) {
1962 if (!is_heap_object) {
1963 JumpIfSmi(obj, fail);
1964 }
1965 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1966 j(not_equal, fail);
1967}
1968
1969
Leon Clarkef7060e22010-06-03 12:02:55 +01001970void MacroAssembler::AbortIfNotNumber(Register object) {
Andrei Popescu402d9372010-02-26 13:31:12 +00001971 Label ok;
1972 Condition is_smi = CheckSmi(object);
1973 j(is_smi, &ok);
1974 Cmp(FieldOperand(object, HeapObject::kMapOffset),
1975 Factory::heap_number_map());
Leon Clarkef7060e22010-06-03 12:02:55 +01001976 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +00001977 bind(&ok);
1978}
1979
1980
Leon Clarkef7060e22010-06-03 12:02:55 +01001981void MacroAssembler::AbortIfNotSmi(Register object) {
Steve Block6ded16b2010-05-10 14:33:55 +01001982 Label ok;
1983 Condition is_smi = CheckSmi(object);
Leon Clarkef7060e22010-06-03 12:02:55 +01001984 Assert(is_smi, "Operand not a smi");
Steve Block6ded16b2010-05-10 14:33:55 +01001985}
1986
1987
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001988void MacroAssembler::AbortIfNotRootValue(Register src,
1989 Heap::RootListIndex root_value_index,
1990 const char* message) {
1991 ASSERT(!src.is(kScratchRegister));
1992 LoadRoot(kScratchRegister, root_value_index);
1993 cmpq(src, kScratchRegister);
1994 Check(equal, message);
1995}
1996
1997
Leon Clarked91b9f72010-01-27 17:25:45 +00001999Condition MacroAssembler::IsObjectStringType(Register heap_object,
2000 Register map,
2001 Register instance_type) {
2002 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00002003 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00002004 ASSERT(kNotStringTag != 0);
2005 testb(instance_type, Immediate(kIsNotStringMask));
2006 return zero;
2007}
2008
2009
Steve Blocka7e24c12009-10-30 11:49:00 +00002010void MacroAssembler::TryGetFunctionPrototype(Register function,
2011 Register result,
2012 Label* miss) {
2013  // Check that the function isn't a smi.
2014 testl(function, Immediate(kSmiTagMask));
2015 j(zero, miss);
2016
2017 // Check that the function really is a function.
2018 CmpObjectType(function, JS_FUNCTION_TYPE, result);
2019 j(not_equal, miss);
2020
2021 // Make sure that the function has an instance prototype.
2022 Label non_instance;
2023 testb(FieldOperand(result, Map::kBitFieldOffset),
2024 Immediate(1 << Map::kHasNonInstancePrototype));
2025 j(not_zero, &non_instance);
2026
2027 // Get the prototype or initial map from the function.
2028 movq(result,
2029 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2030
2031 // If the prototype or initial map is the hole, don't return it and
2032 // simply miss the cache instead. This will allow us to allocate a
2033 // prototype object on-demand in the runtime system.
2034 CompareRoot(result, Heap::kTheHoleValueRootIndex);
2035 j(equal, miss);
2036
2037 // If the function does not have an initial map, we're done.
2038 Label done;
2039 CmpObjectType(result, MAP_TYPE, kScratchRegister);
2040 j(not_equal, &done);
2041
2042 // Get the prototype from the initial map.
2043 movq(result, FieldOperand(result, Map::kPrototypeOffset));
2044 jmp(&done);
2045
2046 // Non-instance prototype: Fetch prototype from constructor field
2047 // in initial map.
2048 bind(&non_instance);
2049 movq(result, FieldOperand(result, Map::kConstructorOffset));
2050
2051 // All done.
2052 bind(&done);
2053}
2054
2055
2056void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2057 if (FLAG_native_code_counters && counter->Enabled()) {
2058 movq(kScratchRegister, ExternalReference(counter));
2059 movl(Operand(kScratchRegister, 0), Immediate(value));
2060 }
2061}
2062
2063
2064void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2065 ASSERT(value > 0);
2066 if (FLAG_native_code_counters && counter->Enabled()) {
2067 movq(kScratchRegister, ExternalReference(counter));
2068 Operand operand(kScratchRegister, 0);
2069 if (value == 1) {
2070 incl(operand);
2071 } else {
2072 addl(operand, Immediate(value));
2073 }
2074 }
2075}
2076
2077
2078void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2079 ASSERT(value > 0);
2080 if (FLAG_native_code_counters && counter->Enabled()) {
2081 movq(kScratchRegister, ExternalReference(counter));
2082 Operand operand(kScratchRegister, 0);
2083 if (value == 1) {
2084 decl(operand);
2085 } else {
2086 subl(operand, Immediate(value));
2087 }
2088 }
2089}
2090
Steve Blocka7e24c12009-10-30 11:49:00 +00002091#ifdef ENABLE_DEBUGGER_SUPPORT
2092
2093void MacroAssembler::PushRegistersFromMemory(RegList regs) {
2094 ASSERT((regs & ~kJSCallerSaved) == 0);
2095 // Push the content of the memory location to the stack.
2096 for (int i = 0; i < kNumJSCallerSaved; i++) {
2097 int r = JSCallerSavedCode(i);
2098 if ((regs & (1 << r)) != 0) {
2099 ExternalReference reg_addr =
2100 ExternalReference(Debug_Address::Register(i));
2101 movq(kScratchRegister, reg_addr);
2102 push(Operand(kScratchRegister, 0));
2103 }
2104 }
2105}
2106
Steve Block3ce2e202009-11-05 08:53:23 +00002107
Steve Blocka7e24c12009-10-30 11:49:00 +00002108void MacroAssembler::SaveRegistersToMemory(RegList regs) {
2109 ASSERT((regs & ~kJSCallerSaved) == 0);
2110 // Copy the content of registers to memory location.
2111 for (int i = 0; i < kNumJSCallerSaved; i++) {
2112 int r = JSCallerSavedCode(i);
2113 if ((regs & (1 << r)) != 0) {
2114 Register reg = { r };
2115 ExternalReference reg_addr =
2116 ExternalReference(Debug_Address::Register(i));
2117 movq(kScratchRegister, reg_addr);
2118 movq(Operand(kScratchRegister, 0), reg);
2119 }
2120 }
2121}
2122
2123
2124void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
2125 ASSERT((regs & ~kJSCallerSaved) == 0);
2126 // Copy the content of memory location to registers.
2127 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
2128 int r = JSCallerSavedCode(i);
2129 if ((regs & (1 << r)) != 0) {
2130 Register reg = { r };
2131 ExternalReference reg_addr =
2132 ExternalReference(Debug_Address::Register(i));
2133 movq(kScratchRegister, reg_addr);
2134 movq(reg, Operand(kScratchRegister, 0));
2135 }
2136 }
2137}
2138
2139
2140void MacroAssembler::PopRegistersToMemory(RegList regs) {
2141 ASSERT((regs & ~kJSCallerSaved) == 0);
2142 // Pop the content from the stack to the memory location.
2143 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
2144 int r = JSCallerSavedCode(i);
2145 if ((regs & (1 << r)) != 0) {
2146 ExternalReference reg_addr =
2147 ExternalReference(Debug_Address::Register(i));
2148 movq(kScratchRegister, reg_addr);
2149 pop(Operand(kScratchRegister, 0));
2150 }
2151 }
2152}
2153
2154
2155void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
2156 Register scratch,
2157 RegList regs) {
2158 ASSERT(!scratch.is(kScratchRegister));
2159 ASSERT(!base.is(kScratchRegister));
2160 ASSERT(!base.is(scratch));
2161 ASSERT((regs & ~kJSCallerSaved) == 0);
2162 // Copy the content of the stack to the memory location and adjust base.
2163 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
2164 int r = JSCallerSavedCode(i);
2165 if ((regs & (1 << r)) != 0) {
2166 movq(scratch, Operand(base, 0));
2167 ExternalReference reg_addr =
2168 ExternalReference(Debug_Address::Register(i));
2169 movq(kScratchRegister, reg_addr);
2170 movq(Operand(kScratchRegister, 0), scratch);
2171 lea(base, Operand(base, kPointerSize));
2172 }
2173 }
2174}
2175
Andrei Popescu402d9372010-02-26 13:31:12 +00002176void MacroAssembler::DebugBreak() {
2177 ASSERT(allow_stub_calls());
2178 xor_(rax, rax); // no arguments
2179 movq(rbx, ExternalReference(Runtime::kDebugBreak));
2180 CEntryStub ces(1);
2181 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Steve Blocka7e24c12009-10-30 11:49:00 +00002182}
Andrei Popescu402d9372010-02-26 13:31:12 +00002183#endif // ENABLE_DEBUGGER_SUPPORT
Steve Blocka7e24c12009-10-30 11:49:00 +00002184
2185
2186void MacroAssembler::InvokePrologue(const ParameterCount& expected,
2187 const ParameterCount& actual,
2188 Handle<Code> code_constant,
2189 Register code_register,
2190 Label* done,
2191 InvokeFlag flag) {
2192 bool definitely_matches = false;
2193 Label invoke;
2194 if (expected.is_immediate()) {
2195 ASSERT(actual.is_immediate());
2196 if (expected.immediate() == actual.immediate()) {
2197 definitely_matches = true;
2198 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01002199 Set(rax, actual.immediate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002200 if (expected.immediate() ==
Steve Block3ce2e202009-11-05 08:53:23 +00002201 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002202 // Don't worry about adapting arguments for built-ins that
2203      // don't want that done. Skip the adaptation code by making it look
2204 // like we have a match between expected and actual number of
2205 // arguments.
2206 definitely_matches = true;
2207 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01002208 Set(rbx, expected.immediate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002209 }
2210 }
2211 } else {
2212 if (actual.is_immediate()) {
2213 // Expected is in register, actual is immediate. This is the
2214 // case when we invoke function values without going through the
2215 // IC mechanism.
2216 cmpq(expected.reg(), Immediate(actual.immediate()));
2217 j(equal, &invoke);
2218 ASSERT(expected.reg().is(rbx));
Steve Block8defd9f2010-07-08 12:39:36 +01002219 Set(rax, actual.immediate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002220 } else if (!expected.reg().is(actual.reg())) {
2221 // Both expected and actual are in (different) registers. This
2222 // is the case when we invoke functions using call and apply.
2223 cmpq(expected.reg(), actual.reg());
2224 j(equal, &invoke);
2225 ASSERT(actual.reg().is(rax));
2226 ASSERT(expected.reg().is(rbx));
2227 }
2228 }
2229
2230 if (!definitely_matches) {
2231 Handle<Code> adaptor =
2232 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
2233 if (!code_constant.is_null()) {
2234 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
2235 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2236 } else if (!code_register.is(rdx)) {
2237 movq(rdx, code_register);
2238 }
2239
2240 if (flag == CALL_FUNCTION) {
2241 Call(adaptor, RelocInfo::CODE_TARGET);
2242 jmp(done);
2243 } else {
2244 Jump(adaptor, RelocInfo::CODE_TARGET);
2245 }
2246 bind(&invoke);
2247 }
2248}
2249
2250
2251void MacroAssembler::InvokeCode(Register code,
2252 const ParameterCount& expected,
2253 const ParameterCount& actual,
2254 InvokeFlag flag) {
2255 Label done;
2256 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
2257 if (flag == CALL_FUNCTION) {
2258 call(code);
2259 } else {
2260 ASSERT(flag == JUMP_FUNCTION);
2261 jmp(code);
2262 }
2263 bind(&done);
2264}
2265
2266
2267void MacroAssembler::InvokeCode(Handle<Code> code,
2268 const ParameterCount& expected,
2269 const ParameterCount& actual,
2270 RelocInfo::Mode rmode,
2271 InvokeFlag flag) {
2272 Label done;
2273 Register dummy = rax;
2274 InvokePrologue(expected, actual, code, dummy, &done, flag);
2275 if (flag == CALL_FUNCTION) {
2276 Call(code, rmode);
2277 } else {
2278 ASSERT(flag == JUMP_FUNCTION);
2279 Jump(code, rmode);
2280 }
2281 bind(&done);
2282}
2283
2284
2285void MacroAssembler::InvokeFunction(Register function,
2286 const ParameterCount& actual,
2287 InvokeFlag flag) {
2288 ASSERT(function.is(rdi));
2289 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2290 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
2291 movsxlq(rbx,
2292 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
2293 movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
2294 // Advances rdx to the end of the Code object header, to the start of
2295 // the executable code.
2296 lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
2297
2298 ParameterCount expected(rbx);
2299 InvokeCode(rdx, expected, actual, flag);
2300}
2301
2302
Andrei Popescu402d9372010-02-26 13:31:12 +00002303void MacroAssembler::InvokeFunction(JSFunction* function,
2304 const ParameterCount& actual,
2305 InvokeFlag flag) {
2306 ASSERT(function->is_compiled());
2307 // Get the function and setup the context.
2308 Move(rdi, Handle<JSFunction>(function));
2309 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2310
2311 // Invoke the cached code.
2312 Handle<Code> code(function->code());
2313 ParameterCount expected(function->shared()->formal_parameter_count());
2314 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
2315}
2316
2317
Steve Blocka7e24c12009-10-30 11:49:00 +00002318void MacroAssembler::EnterFrame(StackFrame::Type type) {
2319 push(rbp);
2320 movq(rbp, rsp);
2321 push(rsi); // Context.
Steve Block3ce2e202009-11-05 08:53:23 +00002322 Push(Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002323 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2324 push(kScratchRegister);
2325 if (FLAG_debug_code) {
2326 movq(kScratchRegister,
2327 Factory::undefined_value(),
2328 RelocInfo::EMBEDDED_OBJECT);
2329 cmpq(Operand(rsp, 0), kScratchRegister);
2330 Check(not_equal, "code object not properly patched");
2331 }
2332}
2333
2334
2335void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2336 if (FLAG_debug_code) {
Steve Block3ce2e202009-11-05 08:53:23 +00002337 Move(kScratchRegister, Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002338 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2339 Check(equal, "stack frame types must match");
2340 }
2341 movq(rsp, rbp);
2342 pop(rbp);
2343}
2344
2345
Ben Murdochbb769b22010-08-11 14:56:33 +01002346void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode,
2347 bool save_rax) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002348  // Set up the frame structure on the stack.
2349 // All constants are relative to the frame pointer of the exit frame.
2350 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2351 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2352 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2353 push(rbp);
2354 movq(rbp, rsp);
2355
2356 // Reserve room for entry stack pointer and push the debug marker.
Steve Block3ce2e202009-11-05 08:53:23 +00002357 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +00002358 push(Immediate(0)); // Saved entry sp, patched before call.
2359 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2360  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +00002361
2362 // Save the frame pointer and the context in top.
2363 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2364 ExternalReference context_address(Top::k_context_address);
Ben Murdochbb769b22010-08-11 14:56:33 +01002365 if (save_rax) {
2366 movq(r14, rax); // Backup rax before we use it.
2367 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002368
2369 movq(rax, rbp);
2370 store_rax(c_entry_fp_address);
2371 movq(rax, rsi);
2372 store_rax(context_address);
Ben Murdochbb769b22010-08-11 14:56:33 +01002373}
Steve Blocka7e24c12009-10-30 11:49:00 +00002374
Ben Murdochbb769b22010-08-11 14:56:33 +01002375void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode,
2376 int result_size,
2377 int argc) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002378#ifdef ENABLE_DEBUGGER_SUPPORT
2379 // Save the state of all registers to the stack from the memory
2380 // location. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00002381 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002382 // TODO(1243899): This should be symmetric to
2383 // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
2384 // correct here, but computed for the other call. Very error
2385 // prone! FIX THIS. Actually there are deeper problems with
2386 // register saving than this asymmetry (see the bug report
2387 // associated with this issue).
2388 PushRegistersFromMemory(kJSCallerSaved);
2389 }
2390#endif
2391
2392#ifdef _WIN64
2393 // Reserve space on stack for result and argument structures, if necessary.
2394 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
2395 // Reserve space for the Arguments object. The Windows 64-bit ABI
2396 // requires us to pass this structure as a pointer to its location on
2397 // the stack. The structure contains 2 values.
Ben Murdochbb769b22010-08-11 14:56:33 +01002398 int argument_stack_space = argc * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002399  // We also need backing space for 4 parameters, even though
2400  // we only pass one or two parameters, and they are passed in registers.
2401 int argument_mirror_space = 4 * kPointerSize;
2402 int total_stack_space =
2403 argument_mirror_space + argument_stack_space + result_stack_space;
2404 subq(rsp, Immediate(total_stack_space));
2405#endif
2406
2407 // Get the required frame alignment for the OS.
2408 static const int kFrameAlignment = OS::ActivationFrameAlignment();
2409 if (kFrameAlignment > 0) {
2410 ASSERT(IsPowerOf2(kFrameAlignment));
2411 movq(kScratchRegister, Immediate(-kFrameAlignment));
2412 and_(rsp, kScratchRegister);
2413 }
2414
2415 // Patch the saved entry sp.
2416 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2417}
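// Sketch of the exit frame at this point, as implied by the asserts and
// pushes above: caller SP at rbp + 16, return address at rbp + 8, saved rbp
// at rbp + 0, the patched entry sp slot at rbp - 8 (kSPOffset), and the code
// object just below it, with rsp re-aligned (and, on Win64, an argument and
// result area reserved beneath).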
2418
2419
Ben Murdochbb769b22010-08-11 14:56:33 +01002420void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
2421 EnterExitFramePrologue(mode, true);
2422
2423  // Set up argv in callee-saved register r12. It is reused in LeaveExitFrame,
2424 // so it must be retained across the C-call.
2425 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
2426 lea(r12, Operand(rbp, r14, times_pointer_size, offset));
2427
2428 EnterExitFrameEpilogue(mode, result_size, 2);
2429}
2430
2431
2432void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
2433 int stack_space,
2434 int argc,
2435 int result_size) {
2436 EnterExitFramePrologue(mode, false);
2437
2438  // Set up argv in callee-saved register r12. It is reused in LeaveExitFrame,
2439 // so it must be retained across the C-call.
2440 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
2441 lea(r12, Operand(rbp, (stack_space * kPointerSize) + offset));
2442
2443 EnterExitFrameEpilogue(mode, result_size, argc);
2444}
2445
2446
Steve Blockd0582a62009-12-15 09:54:21 +00002447void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002448 // Registers:
Steve Block8defd9f2010-07-08 12:39:36 +01002449 // r12 : argv
Steve Blocka7e24c12009-10-30 11:49:00 +00002450#ifdef ENABLE_DEBUGGER_SUPPORT
2451 // Restore the memory copy of the registers by digging them out from
2452 // the stack. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00002453 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002454 // It's okay to clobber register rbx below because we don't need
2455 // the function pointer after this.
2456 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
Steve Blockd0582a62009-12-15 09:54:21 +00002457 int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002458 lea(rbx, Operand(rbp, kOffset));
2459 CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
2460 }
2461#endif
2462
2463 // Get the return address from the stack and restore the frame pointer.
2464 movq(rcx, Operand(rbp, 1 * kPointerSize));
2465 movq(rbp, Operand(rbp, 0 * kPointerSize));
2466
Steve Blocka7e24c12009-10-30 11:49:00 +00002467 // Pop everything up to and including the arguments and the receiver
2468 // from the caller stack.
Steve Block8defd9f2010-07-08 12:39:36 +01002469 lea(rsp, Operand(r12, 1 * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00002470
2471 // Restore current context from top and clear it in debug mode.
2472 ExternalReference context_address(Top::k_context_address);
2473 movq(kScratchRegister, context_address);
2474 movq(rsi, Operand(kScratchRegister, 0));
2475#ifdef DEBUG
2476 movq(Operand(kScratchRegister, 0), Immediate(0));
2477#endif
2478
2479 // Push the return address to get ready to return.
2480 push(rcx);
2481
2482 // Clear the top frame.
2483 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2484 movq(kScratchRegister, c_entry_fp_address);
2485 movq(Operand(kScratchRegister, 0), Immediate(0));
2486}
2487
2488
Steve Blocka7e24c12009-10-30 11:49:00 +00002489void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2490 Register scratch,
2491 Label* miss) {
2492 Label same_contexts;
2493
2494 ASSERT(!holder_reg.is(scratch));
2495 ASSERT(!scratch.is(kScratchRegister));
2496 // Load current lexical context from the stack frame.
2497 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2498
2499 // When generating debug code, make sure the lexical context is set.
2500 if (FLAG_debug_code) {
2501 cmpq(scratch, Immediate(0));
2502 Check(not_equal, "we should not have an empty lexical context");
2503 }
2504 // Load the global context of the current context.
2505 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2506 movq(scratch, FieldOperand(scratch, offset));
2507 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2508
2509 // Check the context is a global context.
2510 if (FLAG_debug_code) {
2511 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2512 Factory::global_context_map());
2513 Check(equal, "JSGlobalObject::global_context should be a global context.");
2514 }
2515
2516 // Check if both contexts are the same.
2517 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2518 j(equal, &same_contexts);
2519
2520 // Compare security tokens.
2521 // Check that the security token in the calling global object is
2522 // compatible with the security token in the receiving global
2523 // object.
2524
2525 // Check the context is a global context.
2526 if (FLAG_debug_code) {
2527 // Preserve original value of holder_reg.
2528 push(holder_reg);
2529 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2530 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2531 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2532
2533    // Read the first word and compare to global_context_map().
2534 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2535 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2536 Check(equal, "JSGlobalObject::global_context should be a global context.");
2537 pop(holder_reg);
2538 }
2539
2540 movq(kScratchRegister,
2541 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00002542 int token_offset =
2543 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002544 movq(scratch, FieldOperand(scratch, token_offset));
2545 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2546 j(not_equal, miss);
2547
2548 bind(&same_contexts);
2549}
2550
2551
2552void MacroAssembler::LoadAllocationTopHelper(Register result,
2553 Register result_end,
2554 Register scratch,
2555 AllocationFlags flags) {
2556 ExternalReference new_space_allocation_top =
2557 ExternalReference::new_space_allocation_top_address();
2558
2559 // Just return if allocation top is already known.
2560 if ((flags & RESULT_CONTAINS_TOP) != 0) {
2561 // No use of scratch if allocation top is provided.
Steve Block6ded16b2010-05-10 14:33:55 +01002562 ASSERT(!scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00002563#ifdef DEBUG
2564 // Assert that result actually contains top on entry.
2565 movq(kScratchRegister, new_space_allocation_top);
2566 cmpq(result, Operand(kScratchRegister, 0));
2567 Check(equal, "Unexpected allocation top");
2568#endif
2569 return;
2570 }
2571
Steve Block6ded16b2010-05-10 14:33:55 +01002572 // Move address of new object to result. Use scratch register if available,
2573 // and keep address in scratch until call to UpdateAllocationTopHelper.
2574 if (scratch.is_valid()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002575 ASSERT(!scratch.is(result_end));
2576 movq(scratch, new_space_allocation_top);
2577 movq(result, Operand(scratch, 0));
Steve Block6ded16b2010-05-10 14:33:55 +01002578 } else if (result.is(rax)) {
2579 load_rax(new_space_allocation_top);
2580 } else {
2581 movq(kScratchRegister, new_space_allocation_top);
2582 movq(result, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002583 }
2584}
2585
2586
2587void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2588 Register scratch) {
Steve Blockd0582a62009-12-15 09:54:21 +00002589 if (FLAG_debug_code) {
2590 testq(result_end, Immediate(kObjectAlignmentMask));
2591 Check(zero, "Unaligned allocation in new space");
2592 }
2593
Steve Blocka7e24c12009-10-30 11:49:00 +00002594 ExternalReference new_space_allocation_top =
2595 ExternalReference::new_space_allocation_top_address();
2596
2597 // Update new top.
2598 if (result_end.is(rax)) {
2599 // rax can be stored directly to a memory location.
2600 store_rax(new_space_allocation_top);
2601 } else {
2602 // Register required - use scratch provided if available.
Steve Block6ded16b2010-05-10 14:33:55 +01002603 if (scratch.is_valid()) {
2604 movq(Operand(scratch, 0), result_end);
2605 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +00002606 movq(kScratchRegister, new_space_allocation_top);
2607 movq(Operand(kScratchRegister, 0), result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00002608 }
2609 }
2610}
2611
2612
2613void MacroAssembler::AllocateInNewSpace(int object_size,
2614 Register result,
2615 Register result_end,
2616 Register scratch,
2617 Label* gc_required,
2618 AllocationFlags flags) {
2619 ASSERT(!result.is(result_end));
2620
2621 // Load address of new object into result.
2622 LoadAllocationTopHelper(result, result_end, scratch, flags);
2623
2624 // Calculate new top and bail out if new space is exhausted.
2625 ExternalReference new_space_allocation_limit =
2626 ExternalReference::new_space_allocation_limit_address();
Steve Block6ded16b2010-05-10 14:33:55 +01002627
2628 Register top_reg = result_end.is_valid() ? result_end : result;
2629
2630 if (top_reg.is(result)) {
2631 addq(top_reg, Immediate(object_size));
2632 } else {
2633 lea(top_reg, Operand(result, object_size));
2634 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002635 movq(kScratchRegister, new_space_allocation_limit);
Steve Block6ded16b2010-05-10 14:33:55 +01002636 cmpq(top_reg, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002637 j(above, gc_required);
2638
2639 // Update allocation top.
Steve Block6ded16b2010-05-10 14:33:55 +01002640 UpdateAllocationTopHelper(top_reg, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00002641
Steve Block6ded16b2010-05-10 14:33:55 +01002642 if (top_reg.is(result)) {
2643 if ((flags & TAG_OBJECT) != 0) {
2644 subq(result, Immediate(object_size - kHeapObjectTag));
2645 } else {
2646 subq(result, Immediate(object_size));
2647 }
2648 } else if ((flags & TAG_OBJECT) != 0) {
2649 // Tag the result if requested.
Steve Blocka7e24c12009-10-30 11:49:00 +00002650 addq(result, Immediate(kHeapObjectTag));
2651 }
2652}
2653
2654
2655void MacroAssembler::AllocateInNewSpace(int header_size,
2656 ScaleFactor element_size,
2657 Register element_count,
2658 Register result,
2659 Register result_end,
2660 Register scratch,
2661 Label* gc_required,
2662 AllocationFlags flags) {
2663 ASSERT(!result.is(result_end));
2664
2665 // Load address of new object into result.
2666 LoadAllocationTopHelper(result, result_end, scratch, flags);
2667
2668 // Calculate new top and bail out if new space is exhausted.
2669 ExternalReference new_space_allocation_limit =
2670 ExternalReference::new_space_allocation_limit_address();
2671 lea(result_end, Operand(result, element_count, element_size, header_size));
2672 movq(kScratchRegister, new_space_allocation_limit);
2673 cmpq(result_end, Operand(kScratchRegister, 0));
2674 j(above, gc_required);
2675
2676 // Update allocation top.
2677 UpdateAllocationTopHelper(result_end, scratch);
2678
2679 // Tag the result if requested.
2680 if ((flags & TAG_OBJECT) != 0) {
2681 addq(result, Immediate(kHeapObjectTag));
2682 }
2683}
2684
2685
2686void MacroAssembler::AllocateInNewSpace(Register object_size,
2687 Register result,
2688 Register result_end,
2689 Register scratch,
2690 Label* gc_required,
2691 AllocationFlags flags) {
2692 // Load address of new object into result.
2693 LoadAllocationTopHelper(result, result_end, scratch, flags);
2694
2695 // Calculate new top and bail out if new space is exhausted.
2696 ExternalReference new_space_allocation_limit =
2697 ExternalReference::new_space_allocation_limit_address();
2698 if (!object_size.is(result_end)) {
2699 movq(result_end, object_size);
2700 }
2701 addq(result_end, result);
2702 movq(kScratchRegister, new_space_allocation_limit);
2703 cmpq(result_end, Operand(kScratchRegister, 0));
2704 j(above, gc_required);
2705
2706 // Update allocation top.
2707 UpdateAllocationTopHelper(result_end, scratch);
2708
2709 // Tag the result if requested.
2710 if ((flags & TAG_OBJECT) != 0) {
2711 addq(result, Immediate(kHeapObjectTag));
2712 }
2713}
2714
2715
2716void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2717 ExternalReference new_space_allocation_top =
2718 ExternalReference::new_space_allocation_top_address();
2719
2720 // Make sure the object has no tag before resetting top.
2721 and_(object, Immediate(~kHeapObjectTagMask));
2722 movq(kScratchRegister, new_space_allocation_top);
2723#ifdef DEBUG
2724 cmpq(object, Operand(kScratchRegister, 0));
2725 Check(below, "Undo allocation of non allocated memory");
2726#endif
2727 movq(Operand(kScratchRegister, 0), object);
2728}
2729
2730
Steve Block3ce2e202009-11-05 08:53:23 +00002731void MacroAssembler::AllocateHeapNumber(Register result,
2732 Register scratch,
2733 Label* gc_required) {
2734 // Allocate heap number in new space.
2735 AllocateInNewSpace(HeapNumber::kSize,
2736 result,
2737 scratch,
2738 no_reg,
2739 gc_required,
2740 TAG_OBJECT);
2741
2742 // Set the map.
2743 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2744 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2745}
2746
2747
Leon Clarkee46be812010-01-19 14:06:41 +00002748void MacroAssembler::AllocateTwoByteString(Register result,
2749 Register length,
2750 Register scratch1,
2751 Register scratch2,
2752 Register scratch3,
2753 Label* gc_required) {
2754 // Calculate the number of bytes needed for the characters in the string while
2755 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002756 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
2757 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002758 ASSERT(kShortSize == 2);
2759 // scratch1 = length * 2 + kObjectAlignmentMask.
Steve Block6ded16b2010-05-10 14:33:55 +01002760 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
2761 kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002762 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002763 if (kHeaderAlignment > 0) {
2764 subq(scratch1, Immediate(kHeaderAlignment));
2765 }
Leon Clarkee46be812010-01-19 14:06:41 +00002766
2767 // Allocate two byte string in new space.
2768 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2769 times_1,
2770 scratch1,
2771 result,
2772 scratch2,
2773 scratch3,
2774 gc_required,
2775 TAG_OBJECT);
2776
2777 // Set the map, length and hash field.
2778 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2779 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002780 Integer32ToSmi(scratch1, length);
2781 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002782 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00002783 Immediate(String::kEmptyHashField));
2784}
2785
2786
2787void MacroAssembler::AllocateAsciiString(Register result,
2788 Register length,
2789 Register scratch1,
2790 Register scratch2,
2791 Register scratch3,
2792 Label* gc_required) {
2793 // Calculate the number of bytes needed for the characters in the string while
2794 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002795 const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
2796 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002797 movl(scratch1, length);
2798 ASSERT(kCharSize == 1);
Steve Block6ded16b2010-05-10 14:33:55 +01002799 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002800 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002801 if (kHeaderAlignment > 0) {
2802 subq(scratch1, Immediate(kHeaderAlignment));
2803 }
Leon Clarkee46be812010-01-19 14:06:41 +00002804
2805 // Allocate ascii string in new space.
2806 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2807 times_1,
2808 scratch1,
2809 result,
2810 scratch2,
2811 scratch3,
2812 gc_required,
2813 TAG_OBJECT);
2814
2815 // Set the map, length and hash field.
2816 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2817 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002818 Integer32ToSmi(scratch1, length);
2819 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002820 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00002821 Immediate(String::kEmptyHashField));
2822}
2823
2824
2825void MacroAssembler::AllocateConsString(Register result,
2826 Register scratch1,
2827 Register scratch2,
2828 Label* gc_required) {
2829  // Allocate cons string object in new space.
2830 AllocateInNewSpace(ConsString::kSize,
2831 result,
2832 scratch1,
2833 scratch2,
2834 gc_required,
2835 TAG_OBJECT);
2836
2837 // Set the map. The other fields are left uninitialized.
2838 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2839 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2840}
2841
2842
2843void MacroAssembler::AllocateAsciiConsString(Register result,
2844 Register scratch1,
2845 Register scratch2,
2846 Label* gc_required) {
2847  // Allocate ascii cons string object in new space.
2848 AllocateInNewSpace(ConsString::kSize,
2849 result,
2850 scratch1,
2851 scratch2,
2852 gc_required,
2853 TAG_OBJECT);
2854
2855 // Set the map. The other fields are left uninitialized.
2856 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2857 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2858}
2859
2860
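// Note: both cons string allocators above initialize only the map word; the
// caller is responsible for filling in the remaining fields (length, hash
// field, and the first/second components) before the new string is used.

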
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Context is the current function context.
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


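// Worked example (illustrative): with context_chain_length == 2, LoadContext
// follows the closure stored in the current context (rsi) to that closure's
// function context, repeats the closure -> context hop once more, and finally
// loads the FCONTEXT slot so dst ends up holding a function context rather
// than an intermediate one.

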
int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64 stack slots are reserved by the caller for all arguments
  // including the ones passed in registers, and space is always allocated for
  // the four register arguments even if the function takes fewer than four
  // arguments.
  // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
  // and the caller does not reserve stack slots for them.
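  // For example (these numbers follow directly from the rules above): a call
  // with three arguments needs 4 slots on Windows 64 (the required minimum)
  // and 0 slots on the AMD64 ABI, while a call with seven arguments needs
  // 7 slots on Windows 64 and 1 slot on the AMD64 ABI.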
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  static const int kMinimumStackSlots = 4;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  static const int kRegisterPassedArguments = 6;
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}


void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);
  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}


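// Stack layout sketch (illustrative): after PrepareCallCFunction the stack
// holds, from rsp upwards, the argument slots computed by
// ArgumentStackSlotsForCFunctionCall followed by the saved original rsp, and
// rsp itself is aligned to OS::ActivationFrameAlignment(). CallCFunction below
// relies on that saved slot to restore rsp after the call.

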
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  movq(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}


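// Minimal usage sketch (the callee and the values loaded into the argument
// registers are assumptions for illustration, not taken from this file): to
// call a two-argument C function on the AMD64 ABI a code generator would
// typically emit
//   PrepareCallCFunction(2);
//   movq(rdi, arg0);  // First argument register (rcx on Windows 64).
//   movq(rsi, arg1);  // Second argument register (rdx on Windows 64).
//   CallCFunction(ExternalReference::some_c_function(), 2);
// where PrepareCallCFunction aligns rsp and saves it, and CallCFunction
// restores it once the call returns.

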
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

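// Usage sketch (illustrative; the patched instructions are assumptions, not
// taken from this file): to overwrite two bytes at a code address with int3
// breakpoints one could write
//   CodePatcher patcher(address, 2);
//   patcher.masm()->int3();
//   patcher.masm()->int3();
// When the patcher goes out of scope the destructor flushes the instruction
// cache and asserts that exactly size bytes were emitted.
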
} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64