// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}
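
// Illustrative usage (added commentary, not in the original source): roots
// are addressed relative to kRootRegister, so a comparison against a
// well-known value needs no extra load, e.g.:
//   CompareRoot(rax, Heap::kUndefinedValueRootIndex);
//   j(equal, &is_undefined);  // 'is_undefined' is a hypothetical label.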


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr; see
  // Page::GetRegionNumberForAddress for more details.
  shrl(addr, Immediate(Page::kRegionSizeLog2));
  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));

  // Set the dirty mark for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
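
// Added commentary: bts sets bit number 'addr' (the region number computed
// above) in the bitmap at object + Page::kDirtyFlagOffset, giving each
// region of the page a single dirty bit that the collector scans later.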


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;
  JumpIfSmi(value, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}
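
// Added commentary: the two RecordWrite overloads differ only in how the
// updated slot is named. The (object, offset, value, index) form computes
// the slot address itself inside RecordWriteNonSmi, while the
// (object, address, value) form above receives the precomputed slot address
// and can call RecordWriteHelper directly once the smi and new-space fast
// paths have been ruled out.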


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);

    if (offset == 0) {
      // index must be int32.
      Register tmp = index.is(rax) ? rbx : rax;
      push(tmp);
      movl(tmp, index);
      cmpq(tmp, index);
      Check(equal, "Index register for RecordWrite must be untagged int32.");
      pop(tmp);
    }
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    lea(dst, FieldOperand(object,
                          index,
                          times_pointer_size,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}
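
// Added commentary: both paths test (object & new_space_mask) against the
// new-space start, relying on new space being reserved at a mask-aligned
// base address. The non-serializer path folds the comparison into the
// arithmetic: (object - new_space_start) & mask is zero exactly for
// new-space objects, so passing 'equal' or 'not_equal' as 'cc' selects
// branching on inside or outside of new space.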


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
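
// Added commentary: p0 is msg with its low bit cleared, so it always carries
// a valid smi tag (kSmiTag == 0, kSmiTagMask == 1) and the GC leaves it
// alone; the 0-or-1 alignment difference p1 - p0 travels separately as a
// genuine smi, letting the runtime reconstruct the original char pointer.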


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}
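
// Illustrative usage (added commentary, mirrors Abort() above): callers push
// the arguments first and pass the count explicitly, e.g. after pushing two
// smi-encoded values:
//   CallRuntime(Runtime::kAbort, 2);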


Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PushHandleScope(Register scratch) {
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  const int kExtensionsOffset = 0;
  const int kNextOffset = Offset(
      ExternalReference::handle_scope_next_address(),
      extensions_address);
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      extensions_address);

  // Push the number of extensions, smi-tagged so the gc will ignore it.
  movq(kScratchRegister, extensions_address);
  movq(scratch, Operand(kScratchRegister, kExtensionsOffset));
  movq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0));
  Integer32ToSmi(scratch, scratch);
  push(scratch);
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  push(Operand(kScratchRegister, kNextOffset));
  push(Operand(kScratchRegister, kLimitOffset));
}


Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  const int kExtensionsOffset = 0;
  const int kNextOffset = Offset(
      ExternalReference::handle_scope_next_address(),
      extensions_address);
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      extensions_address);

  Object* result = NULL;
  Label write_back;
  movq(kScratchRegister, extensions_address);
  cmpq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0));
  j(equal, &write_back);
  push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  pop(saved);
  movq(kScratchRegister, extensions_address);

  bind(&write_back);
  pop(Operand(kScratchRegister, kLimitOffset));
  pop(Operand(kScratchRegister, kNextOffset));
  pop(scratch);
  SmiToInteger32(scratch, scratch);
  movq(Operand(kScratchRegister, kExtensionsOffset), scratch);

  return result;
}


void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}


Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));

  // Load the builtins object into the target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  movq(rdi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  movq(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    movq(target, FieldOperand(rdi, JSFunction::kCodeOffset));
    cmpq(target, Operand(rsp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  lea(target, FieldOperand(target, Code::kHeaderSize));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

static int kSmiShift = kSmiTagSize + kSmiShiftSize;

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (FLAG_debug_code) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      Label ok;
      j(equal, &ok);
      int3();
      bind(&ok);
    }
  }
  if (source->value() == 0) {
    xorl(dst, dst);
    return;
  }
  int value = source->value();
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}
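
// Added commentary: with kSmiShift == 32, Smi::FromInt(n) is simply n << 32,
// so the lea-based cases above synthesize small smi constants by scaling
// kSmiConstantRegister, which the debug check above verifies holds
// Smi::FromInt(kSmiConstantRegisterValue) (the value 1, judging from the
// 'case 1' path). Case 5, for instance, computes reg + 4 * reg, i.e.
// 5 * Smi::FromInt(1) == Smi::FromInt(5), in a single lea.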

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}
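
// Added commentary, worked example: Integer32ToSmi(rax, rax) with rax == 7
// leaves rax == 0x0000000700000000, i.e. the 32-bit value shifted into the
// high word, with the low word (tag and padding) zero.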


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (FLAG_debug_code) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Make mask 0x8000000000000001 and test that both bits are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}
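
// Added commentary: rol by one moves the sign bit (bit 63) into bit 0 and
// the smi tag bit (bit 0) into bit 1, so testb(..., Immediate(3)) sets the
// zero flag exactly when the value is both non-negative and smi-tagged.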


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible.
    if (dst.is(src1)) {
      addq(dst, src2);
    } else {
      movq(dst, src1);
      addq(dst, src2);
    }
    Assert(no_overflow, "Smi addition overflow");
  } else if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}
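
// Added commentary: in the dst.is(src1) case above the sum is built in
// kScratchRegister first, so when the addition overflows, src1 still holds
// its original value on the branch to on_not_smi_result.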


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    cmpq(dst, src2);
    j(overflow, on_not_smi_result);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}
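
// Added commentary: cmpq performs the same subtraction as subq but only
// updates the flags, so the dst.is(src1) case can test for overflow before
// dst is clobbered, leaving src1 intact on the slow path.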


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result) {
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}

void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}
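
// Added commentary, worked example: for the smi 7 (0x0000000700000000),
// xoring with 0x00000000ffffffff (movl zero-extends ~0) fills the low word
// with ones, and the final not_ clears it again while complementing the
// high word, yielding 0xfffffff800000000 == Smi::FromInt(-8), i.e. ~7.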
1391
1392
1393void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
Steve Block3ce2e202009-11-05 08:53:23 +00001394 ASSERT(!dst.is(src2));
Steve Blocka7e24c12009-10-30 11:49:00 +00001395 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001396 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001397 }
1398 and_(dst, src2);
1399}
1400
1401
Steve Block3ce2e202009-11-05 08:53:23 +00001402void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
1403 if (constant->value() == 0) {
1404 xor_(dst, dst);
1405 } else if (dst.is(src)) {
1406 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001407 Register constant_reg = GetSmiConstant(constant);
1408 and_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001409 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001410 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001411 and_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001412 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001413}
1414
1415
1416void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1417 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001418 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001419 }
1420 or_(dst, src2);
1421}
1422
1423
Steve Block3ce2e202009-11-05 08:53:23 +00001424void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1425 if (dst.is(src)) {
1426 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001427 Register constant_reg = GetSmiConstant(constant);
1428 or_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001429 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001430 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001431 or_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001432 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001433}
1434
Steve Block3ce2e202009-11-05 08:53:23 +00001435
Steve Blocka7e24c12009-10-30 11:49:00 +00001436void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1437 if (!dst.is(src1)) {
Steve Block3ce2e202009-11-05 08:53:23 +00001438 movq(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001439 }
1440 xor_(dst, src2);
1441}
1442
1443
Steve Block3ce2e202009-11-05 08:53:23 +00001444void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1445 if (dst.is(src)) {
1446 ASSERT(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001447 Register constant_reg = GetSmiConstant(constant);
1448 xor_(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001449 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01001450 LoadSmiConstant(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +00001451 xor_(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001452 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001453}
1454
1455
Steve Blocka7e24c12009-10-30 11:49:00 +00001456void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
1457 Register src,
1458 int shift_value) {
Steve Block3ce2e202009-11-05 08:53:23 +00001459 ASSERT(is_uint5(shift_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001460 if (shift_value > 0) {
1461 if (dst.is(src)) {
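      // A single sar by shift_value + kSmiShift untags the value and
      // shifts it at the same time; the shl below retags the result.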
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED(); // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
                                                  Register src,
                                                  int shift_value,
                                                  Label* on_not_smi_result) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED(); // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
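      // A zero shift count cannot clear the sign bit, but JavaScript's >>>
      // reinterprets a negative operand as a large unsigned value (e.g.
      // -1 >>> 0 == 4294967295), which is outside the smi range.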
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  // Untag shift amount.
  SmiToInteger32(rcx, src2);
  // The shift amount is specified by the lower 5 bits, not six as for the
  // 64-bit shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}

void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
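  // Oring kSmiShift into the count makes the effective shift
  // (count & 0x1f) + 32: the extra 32 bits untag dst while shifting, and
  // the shl below retags the result.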
  shr_cl(dst); // Shift is (rcx & 0x1f) + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result); // src2 was zero and src1 negative.
  }
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst); // Shift by 32 + (original rcx & 0x1f).
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}

void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  if (allow_stub_calls()) { // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
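  // Branch-free select: mask = (src1 & kSmiTagMask) - 1 is all ones when
  // src1 is a smi and all zeros otherwise, so
  // dst = src1 ^ ((src1 ^ src2) & mask) picks whichever operand is the
  // non-smi.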
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, then both tag bits are set and neither operand is a smi.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTagMask, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then the scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}


SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
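  // The tagged smi is value << kSmiShift, so a single arithmetic shift
  // turns it into value << shift without a separate untagging step.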
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
  ASSERT_EQ(0, kSmiTag);
  Condition smi = CheckSmi(src);
  j(smi, on_smi);
}


void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi);
}


void MacroAssembler::JumpIfNotPositiveSmi(Register src,
                                          Label* on_not_positive_smi) {
  Condition positive_smi = CheckPositiveSmi(src);
  j(NegateCondition(positive_smi), on_not_positive_smi);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals) {
  SmiCompare(src, constant);
  j(equal, on_equals);
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
                                      Label* on_not_both_smi) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
                                              Label* on_not_both_smi) {
  Condition both_smi = CheckBothPositiveSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                                         Register second_object,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* on_fail) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
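  // times_8 shifts the second masked type left by 3; the assert above
  // guarantees the two masked values occupy disjoint bits, so the lea
  // addition acts as a bitwise or and one cmpl checks both types at once.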
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    Label* on_fail) {
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    SmiCompare(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
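  // Under the 64-bit smi encoding only Smi::FromInt(0) fits in a 32-bit
  // immediate; every other smi constant goes through a register.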
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}


void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}


void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  WriteRecordedPositions();
  call(code_object, rmode);
}

void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0)); // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  // Unlink this handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  pop(Operand(kScratchRegister, 0));
  // Remove the remaining fields.
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    JumpIfSmi(obj, fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(not_equal, fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok);
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}

void MacroAssembler::AbortIfSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(NegateCondition(is_smi), "Operand is a smi");
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  ASSERT(!src.is(kScratchRegister));
  LoadRoot(kScratchRegister, root_value_index);
  cmpq(src, kScratchRegister);
  Check(equal, message);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    movl(Operand(kScratchRegister, 0), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      incl(operand);
    } else {
      addl(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      decl(operand);
    } else {
      subl(operand, Immediate(value));
    }
  }
}

#ifdef ENABLE_DEBUGGER_SUPPORT

void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      push(Operand(kScratchRegister, 0));
    }
  }
}


void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(Operand(kScratchRegister, 0), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(reg, Operand(kScratchRegister, 0));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      pop(Operand(kScratchRegister, 0));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT(!scratch.is(kScratchRegister));
  ASSERT(!base.is(kScratchRegister));
  ASSERT(!base.is(scratch));
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      movq(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(Operand(kScratchRegister, 0), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}


void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  xor_(rax, rax); // no arguments
  movq(rbx, ExternalReference(Runtime::kDebugBreak));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif // ENABLE_DEBUGGER_SUPPORT


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip the adaptation code by making it
        // look like we have a match between expected and actual number
        // of arguments.
        definitely_matches = true;
      } else {
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke);
      ASSERT(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Register dummy = rax;
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  lea(rdx, FieldOperand(rdx, Code::kHeaderSize));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  Move(rdi, Handle<JSFunction>(function));
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}

void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi); // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (FLAG_debug_code) {
    movq(kScratchRegister,
         Factory::undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode,
                                            bool save_rax) {
  // Setup the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0)); // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister); // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  if (save_rax) {
    movq(r14, rax); // Backup rax before we use it.
  }

  movq(rax, rbp);
  store_rax(c_entry_fp_address);
  movq(rax, rsi);
  store_rax(context_address);
}


void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode,
                                            int result_size,
                                            int argc) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

#ifdef _WIN64
  // Reserve space on stack for result and argument structures, if necessary.
  int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
  // Reserve space for the Arguments object. The Windows 64-bit ABI
  // requires us to pass this structure as a pointer to its location on
  // the stack. The structure contains 2 values.
  int argument_stack_space = argc * kPointerSize;
  // We also need backing space for 4 parameters, even though we only
  // pass one or two parameters, and they are passed in registers.
  int argument_mirror_space = 4 * kPointerSize;
  int total_stack_space =
      argument_mirror_space + argument_stack_space + result_stack_space;
  subq(rsp, Immediate(total_stack_space));
#endif

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}


void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
  EnterExitFramePrologue(mode, true);

  // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r12, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(mode, result_size, 2);
}


void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
                                       int stack_space,
                                       int argc,
                                       int result_size) {
  EnterExitFramePrologue(mode, false);

  // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r12, Operand(rbp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(mode, result_size, argc);
}


void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
  // Registers:
  // r12 : argv
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register rbx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(rbx, Operand(rbp, kOffset));
    CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Pop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r12, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  movq(kScratchRegister, context_address);
  movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
  movq(Operand(kScratchRegister, 0), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(rcx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  movq(kScratchRegister, c_entry_fp_address);
  movq(Operand(kScratchRegister, 0), Immediate(0));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // Preserve original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    movq(kScratchRegister, new_space_allocation_top);
    cmpq(result, Operand(kScratchRegister, 0));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    ASSERT(!scratch.is(result_end));
    movq(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else if (result.is(rax)) {
    load_rax(new_space_allocation_top);
  } else {
    movq(kScratchRegister, new_space_allocation_top);
    movq(result, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top.
  if (result_end.is(rax)) {
    // rax can be stored directly to a memory location.
    store_rax(new_space_allocation_top);
  } else {
    // Register required - use scratch provided if available.
    if (scratch.is_valid()) {
      movq(Operand(scratch, 0), result_end);
    } else {
      movq(kScratchRegister, new_space_allocation_top);
      movq(Operand(kScratchRegister, 0), result_end);
    }
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  Register top_reg = result_end.is_valid() ? result_end : result;
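  // If no result_end register was supplied, the new top is computed in
  // result itself and the object start is recovered below by subtracting
  // the size again.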

  if (top_reg.is(result)) {
    addq(top_reg, Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(top_reg, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      subq(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subq(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    // Tag the result if requested.
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  }
  addq(result_end, result);
  movq(kScratchRegister, new_space_allocation_limit);
  cmpq(result_end, Operand(kScratchRegister, 0));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  movq(kScratchRegister, new_space_allocation_top);
#ifdef DEBUG
  cmpq(object, Operand(kScratchRegister, 0));
  Check(below, "Undo allocation of non allocated memory");
#endif
  movq(Operand(kScratchRegister, 0), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}

void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ASCII cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // The context is the current function context.
    // The context may be an intermediate context, not a function context.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
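
// For example (a hypothetical call, not taken from this file):
// LoadContext(rax, 1) follows one closure link out from the current context
// in rsi and then resolves that context to its function context, while
// LoadContext(rax, 0) just resolves rsi itself to its function context.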


int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64, stack slots are reserved by the caller for all arguments,
  // including the ones passed in registers; space is always allocated for
  // the four register arguments even if the function takes fewer than four
  // arguments.
  // On the AMD64 ABI (Linux/Mac), the first six arguments are passed in
  // registers and the caller does not reserve stack slots for them.
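  // For example, num_arguments == 7 needs 7 stack slots on Windows 64 but
  // only 1 on the AMD64 ABI; num_arguments == 2 needs 4 and 0 respectively.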
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  static const int kMinimumStackSlots = 4;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  static const int kRegisterPassedArguments = 6;
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}


void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);
  // Make stack end at alignment and allocate space for arguments and old rsp.
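  // Resulting layout (a sketch): rsp ends up frame-aligned; the slots at
  // [rsp + 0 .. argument_slots_on_stack * kPointerSize) are for stack-passed
  // arguments, and the word just above them holds the caller's original rsp,
  // which CallCFunction reloads after the call.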
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  movq(rax, function);
  CallCFunction(rax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  // Restore the caller's rsp that PrepareCallCFunction saved above the
  // argument slots.
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}
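
// A hedged usage sketch (hypothetical caller; argument registers are
// rdi/rsi on the AMD64 ABI and rcx/rdx on Windows 64 -- not taken from
// this file):
//   masm->PrepareCallCFunction(2);
//   masm->movq(rdi, arg1);
//   masm->movq(rsi, arg2);
//   masm->CallCFunction(ExternalReference::some_c_function(), 2);
// PrepareCallCFunction aligns rsp and saves its old value; CallCFunction
// restores it, so no separate cleanup call is needed.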


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
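
// A hedged usage sketch (hypothetical, not taken from this file): overwrite
// one byte of generated code with an int3 breakpoint.
//   {
//     CodePatcher patcher(address, 1);
//     patcher.masm()->int3();
//   }  // The destructor flushes the instruction cache and checks the size.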

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64