// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  ASSERT(is_int32(~Page::kPageAlignmentMask));
  masm->and_(object,
             Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask)));
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  masm->subq(addr, page_start);
  masm->shr(addr, Immediate(kPointerSizeLog2));
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
  masm->j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Load the array length into 'scratch'.
  masm->movl(scratch,
             Operand(page_start,
                     Page::kObjectStartOffset + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray), at
  //   page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  masm->lea(page_start,
            Operand(page_start, array_length, times_pointer_size,
                    Page::kObjectStartOffset + FixedArray::kHeaderSize
                        - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions
  masm->bind(&fast);
  masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}

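// Illustrative sketch (editorial addition, not part of the original file):
// the address arithmetic performed by RecordWriteHelper for a slot on a
// normal page, written out as plain C++. The names are local to this example.
//
//   Address page_start = slot_address & ~Page::kPageAlignmentMask;
//   intptr_t bit_index = (slot_address - page_start) >> kPointerSizeLog2;
//   // The remembered-set bitmap starts at page_start + Page::kRSetOffset;
//   // 'bts' then sets bit 'bit_index' of that bitmap in one instruction.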

class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits : public BitField<uint32_t, 0, 4> {};
  class AddressBits : public BitField<uint32_t, 4, 4> {};
  class ObjectBits : public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}

// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the smi_index register contains the array index into
// the elements array represented as a smi. Otherwise it can be used as a
// scratch register.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register smi_index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !smi_index.is(rsi));

  // First, check if a remembered set write is even needed. The tests below
  // catch stores of smis and stores into the young generation (which does
  // not have space for the remembered set bits).
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, smi_index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}

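// Usage sketch (editorial addition, not part of the original file): a typical
// field store followed by the write barrier, under the register conventions
// documented above. The specific registers and offset are only illustrative.
//
//   __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rax);
//   __ RecordWrite(rbx, JSObject::kPropertiesOffset, rax, rcx);
//
// After the call every register passed in (rbx, rax, rcx here) must be
// treated as clobbered, as the comment above states.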

void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register smi_index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);
  }

  // Test that the object address is not in the new space. We cannot
  // set remembered set bits in the new space.
  movq(scratch, object);
  ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
  and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
  movq(kScratchRegister, ExternalReference::new_space_start());
  cmpq(scratch, kScratchRegister);
  j(equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  // We use optimized write barrier code if the word being written to is not in
  // a large object page, or is in the first "page" of a large object page.
  // We make sure that an offset is inside the right limits whether it is
  // tagged or untagged.
  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize - kHeapObjectTag)) {
    // Compute the bit offset in the remembered set, leave it in 'scratch'.
    lea(scratch, Operand(object, offset));
    ASSERT(is_int32(Page::kPageAlignmentMask));
    and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
    shr(scratch, Immediate(kObjectAlignmentBits));

    // Compute the page address from the heap object pointer, leave it in
    // 'object' (immediate value is sign extended).
    and_(object, Immediate(~Page::kPageAlignmentMask));

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions
    bts(Operand(object, Page::kRSetOffset), scratch);
  } else {
    Register dst = smi_index;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric.
      SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
      lea(dst, Operand(object,
                       index.reg,
                       index.scale,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, scratch);
    } else {
      RecordWriteStub stub(object, dst, scratch);
      CallStub(&stub);
    }
  }

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}

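// Illustrative sketch (editorial addition, not part of the original file):
// the pointer-splitting arithmetic used in Abort, with a concrete value.
// Assume kSmiTagMask == 1 and kSmiTag == 0, as asserted elsewhere here.
//
//   msg   = 0x7fff12345677              // An unaligned char* is allowed.
//   p0    = (msg & ~1) + 0              // = 0x7fff12345676, smi-tagged.
//   delta = msg - p0                    // = 1, passed as Smi::FromInt(delta).
//
// The runtime recovers the original pointer as p0 + delta, so no raw,
// untagged pointer is ever live across the runtime call.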

void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}

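// Usage sketch (editorial addition, not part of the original file): how the
// runtime-call helpers above are typically driven from generated code. The
// runtime function id and argument are only examples.
//
//   __ push(rax);                                 // Push the argument(s).
//   __ CallRuntime(Runtime::kNumberToString, 1);  // Result returned in rax.
//
// CallRuntime loads the argument count into rax and the C entry point into
// rbx, then dispatches through CEntryStub, as the code above shows.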

void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  movq(rax, Immediate(num_arguments));
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));

  // Load the builtins object into the target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  movq(rdi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  movq(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    movq(target, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    movq(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmpq(target, Operand(rsp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  lea(target, FieldOperand(target, Code::kHeaderSize));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xor_(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (x == 0) {
    xor_(kScratchRegister, kScratchRegister);
    movq(dst, kScratchRegister);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

static int kSmiShift = kSmiTagSize + kSmiShiftSize;

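// Layout sketch (editorial addition, not part of the original file): on x64,
// kSmiShift is 32, so a smi keeps its 32-bit payload in the upper half of the
// word and the low 32 bits (tag plus padding) are all zero. For example:
//
//   Integer32ToSmi:  5                     ->  0x0000000500000000
//   SmiToInteger32:  0x0000000500000000    ->  5    (logical shift right 32)
//   SmiToInteger64:  0xffffffff00000000    ->  -1   (arithmetic shift keeps
//                                                    the payload's sign)
//
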
void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // A 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (src->value() == 0) {
    // The smi zero is the only tagged smi whose representation fits in a
    // 32-bit immediate.
    cmpq(dst, Immediate(0));
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}

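// Explanatory sketch (editorial addition, not part of the original file):
// why the rotate-by-one trick in CheckPositiveSmi works. Rotating the 64-bit
// value left by one moves the sign bit (bit 63) into bit 0 and the smi tag
// (bit 0) into bit 1, so a single testl against 0x03 checks "tag == 0 and
// sign == 0", i.e. the value is a non-negative smi:
//
//   rol(value, 1)        -> bit 0 = old sign bit, bit 1 = old tag bit
//   testl(value, 0x03)   -> zero  <=>  sign == 0 && tag == 0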

Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  andl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  cmpq(kScratchRegister, Immediate(1));
  return equal;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testq(src, Immediate(0x80000000));
  return zero;
}


void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    addq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    subq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}

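// Explanatory sketch (editorial addition, not part of the original file):
// the negative-zero check in SmiMul in plain terms. If the integer product is
// zero but exactly one operand was negative, JavaScript requires the result
// -0.0, which a smi cannot represent, so the code bails out to the slow case.
//
//   0 * -4  ->  product == 0, (src1 ^ src2) < 0   ->  jump to on_not_smi_result
//   0 *  4  ->  product == 0, (src1 ^ src2) >= 0  ->  smi zero is the result
//
// The sign of (src1 ^ src2) is negative exactly when the operands' signs
// differ, which, together with a zero product, means one operand was negative.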

void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  Move(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    addq(dst, src);
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
    Label result_ok;
    j(no_overflow, &result_ok);
    subq(dst, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&result_ok);
  } else {
    Move(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
  } else {
    // Subtract by adding the negative, to do it in two operations.
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
    } else {
      Move(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
    Label sub_success;
    j(no_overflow, &sub_success);
    addq(src, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&sub_success);
  } else {
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
      j(overflow, on_not_smi_result);
    } else {
      Move(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}

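// Explanatory sketch (editorial addition, not part of the original file):
// after SmiToInteger32, rax holds a plain 32-bit value, and
// testl(rax, 0x7fffffff) is zero for exactly two inputs: 0 and
// Smi::kMinValue (0x80000000). Those are precisely the dividends that need
// the extra sign check on the divisor:
//
//   0         / negative  ->  -0.0 in JavaScript, not representable as a smi
//   kMinValue / -1        ->  overflows idivl and would raise a divide error
//
// Every other dividend falls straight through to the fast path at safe_div.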

void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    and_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    or_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    xor_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
                                                  Register src,
                                                  int shift_value,
                                                  Label* on_not_smi_result) {
  // Logic right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value,
                                          Label* on_not_smi_result) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smi_result) {
  ASSERT(!dst.is(rcx));
  Label result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // Shift amount specified by lower 5 bits, not six as the shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  Label result_ok;
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift is rcx modulo 0x1f + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


1280void MacroAssembler::SelectNonSmi(Register dst,
1281 Register src1,
1282 Register src2,
1283 Label* on_not_smis) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001284 ASSERT(!dst.is(kScratchRegister));
1285 ASSERT(!src1.is(kScratchRegister));
1286 ASSERT(!src2.is(kScratchRegister));
ager@chromium.org4af710e2009-09-15 12:20:11 +00001287 ASSERT(!dst.is(src1));
1288 ASSERT(!dst.is(src2));
1289 // Both operands must not be smis.
1290#ifdef DEBUG
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001291 if (allow_stub_calls()) { // Check contains a stub call.
1292 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
1293 Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
1294 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001295#endif
1296 ASSERT_EQ(0, kSmiTag);
1297 ASSERT_EQ(0, Smi::FromInt(0));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001298 movl(kScratchRegister, Immediate(kSmiTagMask));
ager@chromium.org4af710e2009-09-15 12:20:11 +00001299 and_(kScratchRegister, src1);
1300 testl(kScratchRegister, src2);
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001301 // If non-zero then neither operand is a smi.
ager@chromium.org4af710e2009-09-15 12:20:11 +00001302 j(not_zero, on_not_smis);
ager@chromium.org4af710e2009-09-15 12:20:11 +00001303
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001304 // Exactly one operand is a smi.
ager@chromium.org4af710e2009-09-15 12:20:11 +00001305 ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
1306 // kScratchRegister still holds src1 & kSmiTagMask, which is either zero or one.
1307 subq(kScratchRegister, Immediate(1));
1308 // If src1 is a smi, the scratch register is now all 1s, else it is all 0s.
1309 movq(dst, src1);
1310 xor_(dst, src2);
1311 and_(dst, kScratchRegister);
1312 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
1313 xor_(dst, src1);
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001314 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
ager@chromium.org4af710e2009-09-15 12:20:11 +00001315}
1316
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001317SmiIndex MacroAssembler::SmiToIndex(Register dst,
1318 Register src,
1319 int shift) {
ager@chromium.org4af710e2009-09-15 12:20:11 +00001320 ASSERT(is_uint6(shift));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001321 // There is a possible optimization if shift is in the range 60-63, but that
1322 // will (and must) never happen.
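  // The index equals the smi value scaled by 2^shift. The value is stored
  // shifted left by kSmiShift, so adjust by the difference between the two.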
1323 if (!dst.is(src)) {
1324 movq(dst, src);
ager@chromium.org4af710e2009-09-15 12:20:11 +00001325 }
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001326 if (shift < kSmiShift) {
1327 sar(dst, Immediate(kSmiShift - shift));
1328 } else {
1329 shl(dst, Immediate(shift - kSmiShift));
ager@chromium.org4af710e2009-09-15 12:20:11 +00001330 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001331 return SmiIndex(dst, times_1);
1332}
1333
ager@chromium.org4af710e2009-09-15 12:20:11 +00001334SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1335 Register src,
1336 int shift) {
1337 // Register src holds a positive smi.
1338 ASSERT(is_uint6(shift));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001339 if (!dst.is(src)) {
1340 movq(dst, src);
ager@chromium.org4af710e2009-09-15 12:20:11 +00001341 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001342 neg(dst);
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001343 if (shift < kSmiShift) {
1344 sar(dst, Immediate(kSmiShift - shift));
1345 } else {
1346 shl(dst, Immediate(shift - kSmiShift));
1347 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001348 return SmiIndex(dst, times_1);
1349}
1350
1351
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001352void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
1353 ASSERT_EQ(0, kSmiTag);
1354 Condition smi = CheckSmi(src);
1355 j(smi, on_smi);
1356}
1357
1358
1359void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
1360 Condition smi = CheckSmi(src);
1361 j(NegateCondition(smi), on_not_smi);
1362}
1363
1364
1365void MacroAssembler::JumpIfNotPositiveSmi(Register src,
1366 Label* on_not_positive_smi) {
1367 Condition positive_smi = CheckPositiveSmi(src);
1368 j(NegateCondition(positive_smi), on_not_positive_smi);
1369}
1370
1371
1372void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1373 Smi* constant,
1374 Label* on_equals) {
1375 SmiCompare(src, constant);
1376 j(equal, on_equals);
1377}
1378
1379
1380void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
1381 Condition is_valid = CheckInteger32ValidSmiValue(src);
1382 j(NegateCondition(is_valid), on_invalid);
1383}
1384
1385
ager@chromium.org3811b432009-10-28 14:53:37 +00001386void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1387 Label* on_invalid) {
1388 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1389 j(NegateCondition(is_valid), on_invalid);
1390}
1391
1392
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001393void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
1394 Label* on_not_both_smi) {
1395 Condition both_smi = CheckBothSmi(src1, src2);
1396 j(NegateCondition(both_smi), on_not_both_smi);
1397}
ager@chromium.org4af710e2009-09-15 12:20:11 +00001398
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001399
sgjesse@chromium.orgb302e562010-02-03 11:26:59 +00001400void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
1401 Label* on_not_both_smi) {
1402 Condition both_smi = CheckBothPositiveSmi(src1, src2);
1403 j(NegateCondition(both_smi), on_not_both_smi);
1404}
1405
1406
1407
1408void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
1409 Register second_object,
1410 Register scratch1,
1411 Register scratch2,
1412 Label* on_fail) {
1413 // Check that both objects are not smis.
1414 Condition either_smi = CheckEitherSmi(first_object, second_object);
1415 j(either_smi, on_fail);
1416
1417 // Load instance type for both strings.
1418 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
1419 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
1420 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
1421 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
1422
1423 // Check that both are flat ascii strings.
1424 ASSERT(kNotStringTag != 0);
1425 const int kFlatAsciiStringMask =
1426 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1427 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1428
1429 andl(scratch1, Immediate(kFlatAsciiStringMask));
1430 andl(scratch2, Immediate(kFlatAsciiStringMask));
1431 // Interleave the bits to check both scratch1 and scratch2 in one test.
1432 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1433 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
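  // The masks do not overlap when one is shifted by 3 (checked above), so the
  // lea computes scratch1 | (scratch2 << 3) and one compare checks both types.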
1434 cmpl(scratch1,
1435 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1436 j(not_equal, on_fail);
1437}
1438
1439
ager@chromium.orgce5e87b2010-03-10 10:24:18 +00001440void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1441 Register instance_type,
1442 Register scratch,
1443 Label* failure) {
1444 if (!scratch.is(instance_type)) {
1445 movl(scratch, instance_type);
1446 }
1447
1448 const int kFlatAsciiStringMask =
1449 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1450
1451 andl(scratch, Immediate(kFlatAsciiStringMask));
1452 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1453 j(not_equal, failure);
1454}
1455
1456
1457void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1458 Register first_object_instance_type,
1459 Register second_object_instance_type,
1460 Register scratch1,
1461 Register scratch2,
1462 Label* on_fail) {
1463 // Load instance type for both strings.
1464 movq(scratch1, first_object_instance_type);
1465 movq(scratch2, second_object_instance_type);
1466
1467 // Check that both are flat ascii strings.
1468 ASSERT(kNotStringTag != 0);
1469 const int kFlatAsciiStringMask =
1470 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1471 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1472
1473 andl(scratch1, Immediate(kFlatAsciiStringMask));
1474 andl(scratch2, Immediate(kFlatAsciiStringMask));
1475 // Interleave the bits to check both scratch1 and scratch2 in one test.
1476 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1477 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1478 cmpl(scratch1,
1479 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1480 j(not_equal, on_fail);
1481}
1482
1483
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001484void MacroAssembler::Move(Register dst, Handle<Object> source) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001485 ASSERT(!source->IsFailure());
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001486 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001487 Move(dst, Smi::cast(*source));
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001488 } else {
1489 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1490 }
1491}
1492
1493
1494void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001495 ASSERT(!source->IsFailure());
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001496 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001497 Move(dst, Smi::cast(*source));
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001498 } else {
1499 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1500 movq(dst, kScratchRegister);
1501 }
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001502}
1503
1504
1505void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001506 if (source->IsSmi()) {
1507 SmiCompare(dst, Smi::cast(*source));
1508 } else {
1509 Move(kScratchRegister, source);
1510 cmpq(dst, kScratchRegister);
1511 }
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001512}
1513
1514
ager@chromium.org3e875802009-06-29 08:26:34 +00001515void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001516 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001517 SmiCompare(dst, Smi::cast(*source));
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001518 } else {
1519 ASSERT(source->IsHeapObject());
1520 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1521 cmpq(dst, kScratchRegister);
1522 }
ager@chromium.org3e875802009-06-29 08:26:34 +00001523}
1524
1525
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001526void MacroAssembler::Push(Handle<Object> source) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001527 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001528 Push(Smi::cast(*source));
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001529 } else {
1530 ASSERT(source->IsHeapObject());
1531 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1532 push(kScratchRegister);
1533 }
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001534}
1535
1536
sgjesse@chromium.org0b6db592009-07-30 14:48:31 +00001537void MacroAssembler::Push(Smi* source) {
ager@chromium.org3811b432009-10-28 14:53:37 +00001538 intptr_t smi = reinterpret_cast<intptr_t>(source);
1539 if (is_int32(smi)) {
1540 push(Immediate(static_cast<int32_t>(smi)));
sgjesse@chromium.org0b6db592009-07-30 14:48:31 +00001541 } else {
ager@chromium.org3811b432009-10-28 14:53:37 +00001542 Set(kScratchRegister, smi);
1543 push(kScratchRegister);
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001544 }
1545}
1546
1547
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00001548void MacroAssembler::Drop(int stack_elements) {
1549 if (stack_elements > 0) {
1550 addq(rsp, Immediate(stack_elements * kPointerSize));
1551 }
1552}
1553
1554
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001555void MacroAssembler::Test(const Operand& src, Smi* source) {
ager@chromium.org3811b432009-10-28 14:53:37 +00001556 intptr_t smi = reinterpret_cast<intptr_t>(source);
1557 if (is_int32(smi)) {
1558 testl(src, Immediate(static_cast<int32_t>(smi)));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001559 } else {
ager@chromium.org3811b432009-10-28 14:53:37 +00001560 Move(kScratchRegister, source);
1561 testq(src, kScratchRegister);
sgjesse@chromium.org0b6db592009-07-30 14:48:31 +00001562 }
1563}
1564
1565
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001566void MacroAssembler::Jump(ExternalReference ext) {
1567 movq(kScratchRegister, ext);
1568 jmp(kScratchRegister);
1569}
1570
1571
1572void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1573 movq(kScratchRegister, destination, rmode);
1574 jmp(kScratchRegister);
1575}
1576
1577
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001578void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00001579 // TODO(X64): Inline this
1580 jmp(code_object, rmode);
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001581}
1582
1583
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001584void MacroAssembler::Call(ExternalReference ext) {
1585 movq(kScratchRegister, ext);
1586 call(kScratchRegister);
1587}
1588
1589
1590void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1591 movq(kScratchRegister, destination, rmode);
1592 call(kScratchRegister);
1593}
1594
1595
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001596void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001597 ASSERT(RelocInfo::IsCodeTarget(rmode));
sgjesse@chromium.org911335c2009-08-19 12:59:44 +00001598 WriteRecordedPositions();
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00001599 call(code_object, rmode);
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001600}
1601
1602
ager@chromium.orge2902be2009-06-08 12:21:35 +00001603void MacroAssembler::PushTryHandler(CodeLocation try_location,
1604 HandlerType type) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001605 // Adjust this code if not the case.
1606 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1607
1608 // The pc (return address) is already on TOS. This code pushes state,
1609 // frame pointer and current handler. Check that they are expected
1610 // next on the stack, in that order.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001611 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1612 StackHandlerConstants::kPCOffset - kPointerSize);
ager@chromium.orge2902be2009-06-08 12:21:35 +00001613 ASSERT_EQ(StackHandlerConstants::kFPOffset,
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001614 StackHandlerConstants::kStateOffset - kPointerSize);
1615 ASSERT_EQ(StackHandlerConstants::kNextOffset,
ager@chromium.orge2902be2009-06-08 12:21:35 +00001616 StackHandlerConstants::kFPOffset - kPointerSize);
1617
1618 if (try_location == IN_JAVASCRIPT) {
1619 if (type == TRY_CATCH_HANDLER) {
1620 push(Immediate(StackHandler::TRY_CATCH));
1621 } else {
1622 push(Immediate(StackHandler::TRY_FINALLY));
1623 }
ager@chromium.orge2902be2009-06-08 12:21:35 +00001624 push(rbp);
ager@chromium.orge2902be2009-06-08 12:21:35 +00001625 } else {
1626 ASSERT(try_location == IN_JS_ENTRY);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001627 // The frame pointer does not point to a JS frame so we save NULL
1628 // for rbp. We expect the code throwing an exception to check rbp
1629 // before dereferencing it to restore the context.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001630 push(Immediate(StackHandler::ENTRY));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001631 push(Immediate(0)); // NULL frame pointer.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001632 }
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001633 // Save the current handler.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001634 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001635 push(Operand(kScratchRegister, 0));
ager@chromium.orge2902be2009-06-08 12:21:35 +00001636 // Link this handler.
1637 movq(Operand(kScratchRegister, 0), rsp);
1638}
1639
1640
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00001641void MacroAssembler::PopTryHandler() {
1642 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1643 // Unlink this handler.
1644 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1645 pop(Operand(kScratchRegister, 0));
1646 // Remove the remaining fields.
1647 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1648}
1649
1650
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001651void MacroAssembler::Ret() {
1652 ret(0);
1653}
1654
1655
ager@chromium.org3e875802009-06-29 08:26:34 +00001656void MacroAssembler::FCmp() {
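  // fucomip compares ST(0) with ST(1), sets EFLAGS and pops ST(0). The
  // ffree/fincstp pair discards the remaining operand, emptying the FPU stack.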
ager@chromium.org3811b432009-10-28 14:53:37 +00001657 fucomip();
1658 ffree(0);
1659 fincstp();
ager@chromium.org3e875802009-06-29 08:26:34 +00001660}
1661
1662
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001663void MacroAssembler::CmpObjectType(Register heap_object,
1664 InstanceType type,
1665 Register map) {
1666 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1667 CmpInstanceType(map, type);
1668}
1669
1670
1671void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1672 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1673 Immediate(static_cast<int8_t>(type)));
1674}
1675
1676
ager@chromium.org5c838252010-02-19 08:53:10 +00001677void MacroAssembler::CheckMap(Register obj,
1678 Handle<Map> map,
1679 Label* fail,
1680 bool is_heap_object) {
1681 if (!is_heap_object) {
1682 JumpIfSmi(obj, fail);
1683 }
1684 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1685 j(not_equal, fail);
1686}
1687
1688
1689void MacroAssembler::AbortIfNotNumber(Register object, const char* msg) {
1690 Label ok;
1691 Condition is_smi = CheckSmi(object);
1692 j(is_smi, &ok);
1693 Cmp(FieldOperand(object, HeapObject::kMapOffset),
1694 Factory::heap_number_map());
1695 Assert(equal, msg);
1696 bind(&ok);
1697}
1698
1699
lrn@chromium.org25156de2010-04-06 13:10:27 +00001700void MacroAssembler::AbortIfNotSmi(Register object, const char* msg) {
1701 Label ok;
1702 Condition is_smi = CheckSmi(object);
1703 j(is_smi, &ok);
1704 Assert(equal, msg);
1705 bind(&ok);
1706}
1707
1708
sgjesse@chromium.orgb302e562010-02-03 11:26:59 +00001709Condition MacroAssembler::IsObjectStringType(Register heap_object,
1710 Register map,
1711 Register instance_type) {
1712 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1713 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
1714 ASSERT(kNotStringTag != 0);
1715 testb(instance_type, Immediate(kIsNotStringMask));
1716 return zero;
1717}
1718
1719
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00001720void MacroAssembler::TryGetFunctionPrototype(Register function,
1721 Register result,
1722 Label* miss) {
1723 // Check that the receiver isn't a smi.
1724 testl(function, Immediate(kSmiTagMask));
1725 j(zero, miss);
1726
1727 // Check that the function really is a function.
1728 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1729 j(not_equal, miss);
1730
1731 // Make sure that the function has an instance prototype.
1732 Label non_instance;
1733 testb(FieldOperand(result, Map::kBitFieldOffset),
1734 Immediate(1 << Map::kHasNonInstancePrototype));
1735 j(not_zero, &non_instance);
1736
1737 // Get the prototype or initial map from the function.
1738 movq(result,
1739 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1740
1741 // If the prototype or initial map is the hole, don't return it and
1742 // simply miss the cache instead. This will allow us to allocate a
1743 // prototype object on-demand in the runtime system.
ager@chromium.org18ad94b2009-09-02 08:22:29 +00001744 CompareRoot(result, Heap::kTheHoleValueRootIndex);
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00001745 j(equal, miss);
1746
1747 // If the function does not have an initial map, we're done.
1748 Label done;
1749 CmpObjectType(result, MAP_TYPE, kScratchRegister);
1750 j(not_equal, &done);
1751
1752 // Get the prototype from the initial map.
1753 movq(result, FieldOperand(result, Map::kPrototypeOffset));
1754 jmp(&done);
1755
1756 // Non-instance prototype: Fetch prototype from constructor field
1757 // in initial map.
1758 bind(&non_instance);
1759 movq(result, FieldOperand(result, Map::kConstructorOffset));
1760
1761 // All done.
1762 bind(&done);
1763}
1764
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001765
1766void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1767 if (FLAG_native_code_counters && counter->Enabled()) {
1768 movq(kScratchRegister, ExternalReference(counter));
1769 movl(Operand(kScratchRegister, 0), Immediate(value));
1770 }
1771}
1772
1773
1774void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1775 ASSERT(value > 0);
1776 if (FLAG_native_code_counters && counter->Enabled()) {
1777 movq(kScratchRegister, ExternalReference(counter));
1778 Operand operand(kScratchRegister, 0);
1779 if (value == 1) {
1780 incl(operand);
1781 } else {
1782 addl(operand, Immediate(value));
1783 }
1784 }
1785}
1786
1787
1788void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1789 ASSERT(value > 0);
1790 if (FLAG_native_code_counters && counter->Enabled()) {
1791 movq(kScratchRegister, ExternalReference(counter));
1792 Operand operand(kScratchRegister, 0);
1793 if (value == 1) {
1794 decl(operand);
1795 } else {
1796 subl(operand, Immediate(value));
1797 }
1798 }
1799}
1800
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001801#ifdef ENABLE_DEBUGGER_SUPPORT
1802
1803void MacroAssembler::PushRegistersFromMemory(RegList regs) {
1804 ASSERT((regs & ~kJSCallerSaved) == 0);
1805 // Push the content of the memory location to the stack.
1806 for (int i = 0; i < kNumJSCallerSaved; i++) {
1807 int r = JSCallerSavedCode(i);
1808 if ((regs & (1 << r)) != 0) {
1809 ExternalReference reg_addr =
1810 ExternalReference(Debug_Address::Register(i));
1811 movq(kScratchRegister, reg_addr);
1812 push(Operand(kScratchRegister, 0));
1813 }
1814 }
1815}
1816
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001817
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001818void MacroAssembler::SaveRegistersToMemory(RegList regs) {
1819 ASSERT((regs & ~kJSCallerSaved) == 0);
1820 // Copy the content of registers to memory location.
1821 for (int i = 0; i < kNumJSCallerSaved; i++) {
1822 int r = JSCallerSavedCode(i);
1823 if ((regs & (1 << r)) != 0) {
1824 Register reg = { r };
1825 ExternalReference reg_addr =
1826 ExternalReference(Debug_Address::Register(i));
1827 movq(kScratchRegister, reg_addr);
1828 movq(Operand(kScratchRegister, 0), reg);
1829 }
1830 }
1831}
1832
1833
1834void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
1835 ASSERT((regs & ~kJSCallerSaved) == 0);
1836 // Copy the content of memory location to registers.
1837 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1838 int r = JSCallerSavedCode(i);
1839 if ((regs & (1 << r)) != 0) {
1840 Register reg = { r };
1841 ExternalReference reg_addr =
1842 ExternalReference(Debug_Address::Register(i));
1843 movq(kScratchRegister, reg_addr);
1844 movq(reg, Operand(kScratchRegister, 0));
1845 }
1846 }
1847}
1848
1849
1850void MacroAssembler::PopRegistersToMemory(RegList regs) {
1851 ASSERT((regs & ~kJSCallerSaved) == 0);
1852 // Pop the content from the stack to the memory location.
1853 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1854 int r = JSCallerSavedCode(i);
1855 if ((regs & (1 << r)) != 0) {
1856 ExternalReference reg_addr =
1857 ExternalReference(Debug_Address::Register(i));
1858 movq(kScratchRegister, reg_addr);
1859 pop(Operand(kScratchRegister, 0));
1860 }
1861 }
1862}
1863
1864
1865void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
1866 Register scratch,
1867 RegList regs) {
1868 ASSERT(!scratch.is(kScratchRegister));
1869 ASSERT(!base.is(kScratchRegister));
1870 ASSERT(!base.is(scratch));
1871 ASSERT((regs & ~kJSCallerSaved) == 0);
1872 // Copy the content of the stack to the memory location and adjust base.
1873 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1874 int r = JSCallerSavedCode(i);
1875 if ((regs & (1 << r)) != 0) {
1876 movq(scratch, Operand(base, 0));
1877 ExternalReference reg_addr =
1878 ExternalReference(Debug_Address::Register(i));
1879 movq(kScratchRegister, reg_addr);
1880 movq(Operand(kScratchRegister, 0), scratch);
1881 lea(base, Operand(base, kPointerSize));
1882 }
1883 }
1884}
1885
ager@chromium.org5c838252010-02-19 08:53:10 +00001886void MacroAssembler::DebugBreak() {
1887 ASSERT(allow_stub_calls());
1888 xor_(rax, rax); // no arguments
1889 movq(rbx, ExternalReference(Runtime::kDebugBreak));
1890 CEntryStub ces(1);
1891 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
ager@chromium.org3e875802009-06-29 08:26:34 +00001892}
ager@chromium.org5c838252010-02-19 08:53:10 +00001893#endif // ENABLE_DEBUGGER_SUPPORT
ager@chromium.org3e875802009-06-29 08:26:34 +00001894
1895
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001896void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1897 const ParameterCount& actual,
1898 Handle<Code> code_constant,
1899 Register code_register,
1900 Label* done,
1901 InvokeFlag flag) {
1902 bool definitely_matches = false;
1903 Label invoke;
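  // Argument adaptation uses fixed registers: rax holds the actual argument
  // count, rbx the expected count and rdx the code entry point.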
1904 if (expected.is_immediate()) {
1905 ASSERT(actual.is_immediate());
1906 if (expected.immediate() == actual.immediate()) {
1907 definitely_matches = true;
1908 } else {
1909 movq(rax, Immediate(actual.immediate()));
1910 if (expected.immediate() ==
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001911 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001912 // Don't worry about adapting arguments for built-ins that
1913 // don't want that done. Skip adaptation code by making it look
1914 // like we have a match between expected and actual number of
1915 // arguments.
1916 definitely_matches = true;
1917 } else {
1918 movq(rbx, Immediate(expected.immediate()));
1919 }
1920 }
1921 } else {
1922 if (actual.is_immediate()) {
1923 // Expected is in register, actual is immediate. This is the
1924 // case when we invoke function values without going through the
1925 // IC mechanism.
1926 cmpq(expected.reg(), Immediate(actual.immediate()));
1927 j(equal, &invoke);
1928 ASSERT(expected.reg().is(rbx));
1929 movq(rax, Immediate(actual.immediate()));
1930 } else if (!expected.reg().is(actual.reg())) {
1931 // Both expected and actual are in (different) registers. This
1932 // is the case when we invoke functions using call and apply.
1933 cmpq(expected.reg(), actual.reg());
1934 j(equal, &invoke);
1935 ASSERT(actual.reg().is(rax));
1936 ASSERT(expected.reg().is(rbx));
1937 }
1938 }
1939
1940 if (!definitely_matches) {
1941 Handle<Code> adaptor =
1942 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
1943 if (!code_constant.is_null()) {
1944 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
1945 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1946 } else if (!code_register.is(rdx)) {
1947 movq(rdx, code_register);
1948 }
1949
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001950 if (flag == CALL_FUNCTION) {
sgjesse@chromium.org911335c2009-08-19 12:59:44 +00001951 Call(adaptor, RelocInfo::CODE_TARGET);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001952 jmp(done);
1953 } else {
sgjesse@chromium.org911335c2009-08-19 12:59:44 +00001954 Jump(adaptor, RelocInfo::CODE_TARGET);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001955 }
1956 bind(&invoke);
1957 }
1958}
1959
1960
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001961void MacroAssembler::InvokeCode(Register code,
1962 const ParameterCount& expected,
1963 const ParameterCount& actual,
1964 InvokeFlag flag) {
1965 Label done;
1966 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1967 if (flag == CALL_FUNCTION) {
1968 call(code);
1969 } else {
1970 ASSERT(flag == JUMP_FUNCTION);
1971 jmp(code);
1972 }
1973 bind(&done);
1974}
1975
1976
1977void MacroAssembler::InvokeCode(Handle<Code> code,
1978 const ParameterCount& expected,
1979 const ParameterCount& actual,
1980 RelocInfo::Mode rmode,
1981 InvokeFlag flag) {
1982 Label done;
1983 Register dummy = rax;
1984 InvokePrologue(expected, actual, code, dummy, &done, flag);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001985 if (flag == CALL_FUNCTION) {
ager@chromium.org3e875802009-06-29 08:26:34 +00001986 Call(code, rmode);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001987 } else {
1988 ASSERT(flag == JUMP_FUNCTION);
ager@chromium.org3e875802009-06-29 08:26:34 +00001989 Jump(code, rmode);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001990 }
1991 bind(&done);
1992}
1993
1994
1995void MacroAssembler::InvokeFunction(Register function,
1996 const ParameterCount& actual,
1997 InvokeFlag flag) {
1998 ASSERT(function.is(rdi));
1999 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2000 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
ager@chromium.org3e875802009-06-29 08:26:34 +00002001 movsxlq(rbx,
2002 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002003 movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
ager@chromium.org5aa501c2009-06-23 07:57:28 +00002004 // Advances rdx to the end of the Code object header, to the start of
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002005 // the executable code.
2006 lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
2007
2008 ParameterCount expected(rbx);
2009 InvokeCode(rdx, expected, actual, flag);
2010}
2011
2012
ager@chromium.org5c838252010-02-19 08:53:10 +00002013void MacroAssembler::InvokeFunction(JSFunction* function,
2014 const ParameterCount& actual,
2015 InvokeFlag flag) {
2016 ASSERT(function->is_compiled());
2017 // Get the function and setup the context.
2018 Move(rdi, Handle<JSFunction>(function));
2019 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2020
2021 // Invoke the cached code.
2022 Handle<Code> code(function->code());
2023 ParameterCount expected(function->shared()->formal_parameter_count());
2024 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
2025}
2026
2027
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002028void MacroAssembler::EnterFrame(StackFrame::Type type) {
2029 push(rbp);
2030 movq(rbp, rsp);
2031 push(rsi); // Context.
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002032 Push(Smi::FromInt(type));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002033 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2034 push(kScratchRegister);
2035 if (FLAG_debug_code) {
2036 movq(kScratchRegister,
2037 Factory::undefined_value(),
2038 RelocInfo::EMBEDDED_OBJECT);
2039 cmpq(Operand(rsp, 0), kScratchRegister);
2040 Check(not_equal, "code object not properly patched");
2041 }
2042}
2043
2044
2045void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2046 if (FLAG_debug_code) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002047 Move(kScratchRegister, Smi::FromInt(type));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002048 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2049 Check(equal, "stack frame types must match");
2050 }
2051 movq(rsp, rbp);
2052 pop(rbp);
2053}
2054
2055
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002056void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002057 // Set up the frame structure on the stack.
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00002058 // All constants are relative to the frame pointer of the exit frame.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002059 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2060 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2061 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2062 push(rbp);
2063 movq(rbp, rsp);
2064
2065 // Reserve room for entry stack pointer and push the debug marker.
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002066 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
ager@chromium.org5c838252010-02-19 08:53:10 +00002067 push(Immediate(0)); // Saved entry sp, patched before call.
2068 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2069 push(kScratchRegister); // Accessed from ExitFrame::code_slot.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002070
2071 // Save the frame pointer and the context in top.
2072 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2073 ExternalReference context_address(Top::k_context_address);
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00002074 movq(r14, rax); // Backup rax before we use it.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002075
2076 movq(rax, rbp);
2077 store_rax(c_entry_fp_address);
2078 movq(rax, rsi);
2079 store_rax(context_address);
2080
2081 // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
2082 // so it must be retained across the C-call.
2083 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00002084 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
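  // Both r14 (the saved rax) and r15 are callee-saved registers in the C
  // calling conventions, so they survive the call out to C code.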
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002085
2086#ifdef ENABLE_DEBUGGER_SUPPORT
2087 // Save the state of all registers to the stack from the memory
2088 // location. This is needed to allow nested break points.
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002089 if (mode == ExitFrame::MODE_DEBUG) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002090 // TODO(1243899): This should be symmetric to
2091 // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
2092 // correct here, but computed for the other call. Very error
2093 // prone! FIX THIS. Actually there are deeper problems with
2094 // register saving than this asymmetry (see the bug report
2095 // associated with this issue).
2096 PushRegistersFromMemory(kJSCallerSaved);
2097 }
2098#endif
2099
ager@chromium.orga1645e22009-09-09 19:27:10 +00002100#ifdef _WIN64
2101 // Reserve space on stack for result and argument structures, if necessary.
2102 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
2103 // Reserve space for the Arguments object. The Windows 64-bit ABI
2104 // requires us to pass this structure as a pointer to its location on
2105 // the stack. The structure contains 2 values.
2106 int argument_stack_space = 2 * kPointerSize;
2107 // We also need backing space for 4 parameters, even though
2108 // we only pass one or two parameters, and they are passed in registers.
2109 int argument_mirror_space = 4 * kPointerSize;
2110 int total_stack_space =
2111 argument_mirror_space + argument_stack_space + result_stack_space;
2112 subq(rsp, Immediate(total_stack_space));
2113#endif
2114
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002115 // Get the required frame alignment for the OS.
2116 static const int kFrameAlignment = OS::ActivationFrameAlignment();
2117 if (kFrameAlignment > 0) {
2118 ASSERT(IsPowerOf2(kFrameAlignment));
2119 movq(kScratchRegister, Immediate(-kFrameAlignment));
2120 and_(rsp, kScratchRegister);
2121 }
2122
2123 // Patch the saved entry sp.
2124 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2125}
2126
2127
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002128void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002129 // Registers:
2130 // r15 : argv
2131#ifdef ENABLE_DEBUGGER_SUPPORT
2132 // Restore the memory copy of the registers by digging them out from
2133 // the stack. This is needed to allow nested break points.
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002134 if (mode == ExitFrame::MODE_DEBUG) {
ager@chromium.orga1645e22009-09-09 19:27:10 +00002135 // It's okay to clobber register rbx below because we don't need
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002136 // the function pointer after this.
2137 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002138 int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002139 lea(rbx, Operand(rbp, kOffset));
2140 CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
2141 }
2142#endif
2143
2144 // Get the return address from the stack and restore the frame pointer.
2145 movq(rcx, Operand(rbp, 1 * kPointerSize));
2146 movq(rbp, Operand(rbp, 0 * kPointerSize));
2147
ager@chromium.orga1645e22009-09-09 19:27:10 +00002148 // Pop everything up to and including the arguments and the receiver
2149 // from the caller stack.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002150 lea(rsp, Operand(r15, 1 * kPointerSize));
2151
2152 // Restore current context from top and clear it in debug mode.
2153 ExternalReference context_address(Top::k_context_address);
2154 movq(kScratchRegister, context_address);
2155 movq(rsi, Operand(kScratchRegister, 0));
2156#ifdef DEBUG
2157 movq(Operand(kScratchRegister, 0), Immediate(0));
2158#endif
2159
2160 // Push the return address to get ready to return.
2161 push(rcx);
2162
2163 // Clear the top frame.
2164 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2165 movq(kScratchRegister, c_entry_fp_address);
2166 movq(Operand(kScratchRegister, 0), Immediate(0));
2167}
2168
2169
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002170Register MacroAssembler::CheckMaps(JSObject* object,
2171 Register object_reg,
2172 JSObject* holder,
2173 Register holder_reg,
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002174 Register scratch,
2175 Label* miss) {
2176 // Make sure there's no overlap between scratch and the other
2177 // registers.
2178 ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));
2179
2180 // Keep track of the current object in register reg. On the first
2181 // iteration, reg is an alias for object_reg, on later iterations,
2182 // it is an alias for holder_reg.
2183 Register reg = object_reg;
2184 int depth = 1;
2185
2186 // Check the maps in the prototype chain.
2187 // Traverse the prototype chain from the object and do map checks.
2188 while (object != holder) {
2189 depth++;
2190
2191 // Only global objects and objects that do not require access
2192 // checks are allowed in stubs.
2193 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2194
2195 JSObject* prototype = JSObject::cast(object->GetPrototype());
2196 if (Heap::InNewSpace(prototype)) {
2197 // Get the map of the current object.
2198 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2199 Cmp(scratch, Handle<Map>(object->map()));
2200 // Branch on the result of the map check.
2201 j(not_equal, miss);
2202 // Check access rights to the global object. This has to happen
2203 // after the map check so that we know that the object is
2204 // actually a global object.
2205 if (object->IsJSGlobalProxy()) {
2206 CheckAccessGlobalProxy(reg, scratch, miss);
2207
2208 // Restore scratch register to be the map of the object.
2209 // We load the prototype from the map in the scratch register.
2210 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2211 }
2212 // The prototype is in new space; we cannot store a reference
2213 // to it in the code. Load it from the map.
2214 reg = holder_reg; // From now on the object is in holder_reg.
2215 movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));
2216
2217 } else {
2218 // Check the map of the current object.
2219 Cmp(FieldOperand(reg, HeapObject::kMapOffset),
2220 Handle<Map>(object->map()));
2221 // Branch on the result of the map check.
2222 j(not_equal, miss);
2223 // Check access rights to the global object. This has to happen
2224 // after the map check so that we know that the object is
2225 // actually a global object.
2226 if (object->IsJSGlobalProxy()) {
2227 CheckAccessGlobalProxy(reg, scratch, miss);
2228 }
2229 // The prototype is in old space; load it directly.
2230 reg = holder_reg; // From now on the object is in holder_reg.
2231 Move(reg, Handle<JSObject>(prototype));
2232 }
2233
2234 // Go to the next object in the prototype chain.
2235 object = prototype;
2236 }
2237
2238 // Check the holder map.
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002239 Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002240 j(not_equal, miss);
2241
2242 // Log the check depth.
2243 LOG(IntEvent("check-maps-depth", depth));
2244
2245 // Perform security check for access to the global object and return
2246 // the holder register.
2247 ASSERT(object == holder);
2248 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2249 if (object->IsJSGlobalProxy()) {
2250 CheckAccessGlobalProxy(reg, scratch, miss);
2251 }
2252 return reg;
2253}
2254
2255
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002256void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2257 Register scratch,
2258 Label* miss) {
2259 Label same_contexts;
2260
2261 ASSERT(!holder_reg.is(scratch));
2262 ASSERT(!scratch.is(kScratchRegister));
2263 // Load current lexical context from the stack frame.
2264 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2265
2266 // When generating debug code, make sure the lexical context is set.
2267 if (FLAG_debug_code) {
2268 cmpq(scratch, Immediate(0));
2269 Check(not_equal, "we should not have an empty lexical context");
2270 }
2271 // Load the global context of the current context.
2272 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2273 movq(scratch, FieldOperand(scratch, offset));
2274 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2275
2276 // Check the context is a global context.
2277 if (FLAG_debug_code) {
2278 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2279 Factory::global_context_map());
2280 Check(equal, "JSGlobalObject::global_context should be a global context.");
2281 }
2282
2283 // Check if both contexts are the same.
2284 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2285 j(equal, &same_contexts);
2286
2287 // Compare security tokens.
2288 // Check that the security token in the calling global object is
2289 // compatible with the security token in the receiving global
2290 // object.
2291
2292 // Check the context is a global context.
2293 if (FLAG_debug_code) {
2294 // Preserve original value of holder_reg.
2295 push(holder_reg);
2296 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002297 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002298 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2299
2300 // Read the first word and compare to global_context_map().
2301 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002302 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002303 Check(equal, "JSGlobalObject::global_context should be a global context.");
2304 pop(holder_reg);
2305 }
2306
2307 movq(kScratchRegister,
2308 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002309 int token_offset =
2310 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002311 movq(scratch, FieldOperand(scratch, token_offset));
2312 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2313 j(not_equal, miss);
2314
2315 bind(&same_contexts);
2316}
2317
2318
ager@chromium.orga1645e22009-09-09 19:27:10 +00002319void MacroAssembler::LoadAllocationTopHelper(Register result,
2320 Register result_end,
2321 Register scratch,
2322 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002323 ExternalReference new_space_allocation_top =
2324 ExternalReference::new_space_allocation_top_address();
2325
2326 // Just return if allocation top is already known.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002327 if ((flags & RESULT_CONTAINS_TOP) != 0) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002328 // No use of scratch if allocation top is provided.
2329 ASSERT(scratch.is(no_reg));
ager@chromium.orga1645e22009-09-09 19:27:10 +00002330#ifdef DEBUG
2331 // Assert that result actually contains top on entry.
2332 movq(kScratchRegister, new_space_allocation_top);
2333 cmpq(result, Operand(kScratchRegister, 0));
2334 Check(equal, "Unexpected allocation top");
2335#endif
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002336 return;
2337 }
2338
2339 // Move address of new object to result. Use scratch register if available.
2340 if (scratch.is(no_reg)) {
2341 movq(kScratchRegister, new_space_allocation_top);
2342 movq(result, Operand(kScratchRegister, 0));
2343 } else {
2344 ASSERT(!scratch.is(result_end));
2345 movq(scratch, new_space_allocation_top);
2346 movq(result, Operand(scratch, 0));
2347 }
2348}
2349
2350
2351void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2352 Register scratch) {
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002353 if (FLAG_debug_code) {
2354 testq(result_end, Immediate(kObjectAlignmentMask));
2355 Check(zero, "Unaligned allocation in new space");
2356 }
2357
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002358 ExternalReference new_space_allocation_top =
2359 ExternalReference::new_space_allocation_top_address();
2360
2361 // Update new top.
2362 if (result_end.is(rax)) {
2363 // rax can be stored directly to a memory location.
2364 store_rax(new_space_allocation_top);
2365 } else {
2366 // Register required - use scratch provided if available.
2367 if (scratch.is(no_reg)) {
2368 movq(kScratchRegister, new_space_allocation_top);
2369 movq(Operand(kScratchRegister, 0), result_end);
2370 } else {
2371 movq(Operand(scratch, 0), result_end);
2372 }
2373 }
2374}
2375
2376
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00002377void MacroAssembler::AllocateInNewSpace(int object_size,
2378 Register result,
2379 Register result_end,
2380 Register scratch,
2381 Label* gc_required,
2382 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002383 ASSERT(!result.is(result_end));
2384
2385 // Load address of new object into result.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002386 LoadAllocationTopHelper(result, result_end, scratch, flags);
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002387
2388 // Calculate new top and bail out if new space is exhausted.
2389 ExternalReference new_space_allocation_limit =
2390 ExternalReference::new_space_allocation_limit_address();
2391 lea(result_end, Operand(result, object_size));
2392 movq(kScratchRegister, new_space_allocation_limit);
2393 cmpq(result_end, Operand(kScratchRegister, 0));
2394 j(above, gc_required);
2395
2396 // Update allocation top.
2397 UpdateAllocationTopHelper(result_end, scratch);
ager@chromium.orga1645e22009-09-09 19:27:10 +00002398
2399 // Tag the result if requested.
2400 if ((flags & TAG_OBJECT) != 0) {
2401 addq(result, Immediate(kHeapObjectTag));
2402 }
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002403}
2404
2405
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00002406void MacroAssembler::AllocateInNewSpace(int header_size,
2407 ScaleFactor element_size,
2408 Register element_count,
2409 Register result,
2410 Register result_end,
2411 Register scratch,
2412 Label* gc_required,
2413 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002414 ASSERT(!result.is(result_end));
2415
2416 // Load address of new object into result.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002417 LoadAllocationTopHelper(result, result_end, scratch, flags);
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002418
2419 // Calculate new top and bail out if new space is exhausted.
2420 ExternalReference new_space_allocation_limit =
2421 ExternalReference::new_space_allocation_limit_address();
2422 lea(result_end, Operand(result, element_count, element_size, header_size));
2423 movq(kScratchRegister, new_space_allocation_limit);
2424 cmpq(result_end, Operand(kScratchRegister, 0));
2425 j(above, gc_required);
2426
2427 // Update allocation top.
2428 UpdateAllocationTopHelper(result_end, scratch);
ager@chromium.orga1645e22009-09-09 19:27:10 +00002429
2430 // Tag the result if requested.
2431 if ((flags & TAG_OBJECT) != 0) {
2432 addq(result, Immediate(kHeapObjectTag));
2433 }
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002434}
2435
2436
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00002437void MacroAssembler::AllocateInNewSpace(Register object_size,
2438 Register result,
2439 Register result_end,
2440 Register scratch,
2441 Label* gc_required,
2442 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002443 // Load address of new object into result.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002444 LoadAllocationTopHelper(result, result_end, scratch, flags);
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002445
2446 // Calculate new top and bail out if new space is exhausted.
2447 ExternalReference new_space_allocation_limit =
2448 ExternalReference::new_space_allocation_limit_address();
2449 if (!object_size.is(result_end)) {
2450 movq(result_end, object_size);
2451 }
2452 addq(result_end, result);
2453 movq(kScratchRegister, new_space_allocation_limit);
2454 cmpq(result_end, Operand(kScratchRegister, 0));
2455 j(above, gc_required);
2456
2457 // Update allocation top.
2458 UpdateAllocationTopHelper(result_end, scratch);
ager@chromium.orga1645e22009-09-09 19:27:10 +00002459
2460 // Tag the result if requested.
2461 if ((flags & TAG_OBJECT) != 0) {
2462 addq(result, Immediate(kHeapObjectTag));
2463 }
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002464}
2465
2466
2467void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2468 ExternalReference new_space_allocation_top =
2469 ExternalReference::new_space_allocation_top_address();
2470
2471 // Make sure the object has no tag before resetting top.
2472 and_(object, Immediate(~kHeapObjectTagMask));
2473 movq(kScratchRegister, new_space_allocation_top);
2474#ifdef DEBUG
2475 cmpq(object, Operand(kScratchRegister, 0));
2476 Check(below, "Undo allocation of non allocated memory");
2477#endif
2478 movq(Operand(kScratchRegister, 0), object);
2479}
2480
2481
ager@chromium.org3811b432009-10-28 14:53:37 +00002482void MacroAssembler::AllocateHeapNumber(Register result,
2483 Register scratch,
2484 Label* gc_required) {
2485 // Allocate heap number in new space.
2486 AllocateInNewSpace(HeapNumber::kSize,
2487 result,
2488 scratch,
2489 no_reg,
2490 gc_required,
2491 TAG_OBJECT);
2492
2493 // Set the map.
2494 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2495 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2496}
2497
2498
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00002499void MacroAssembler::AllocateTwoByteString(Register result,
2500 Register length,
2501 Register scratch1,
2502 Register scratch2,
2503 Register scratch3,
2504 Label* gc_required) {
2505 // Calculate the number of bytes needed for the characters in the string while
2506 // observing object alignment.
2507 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2508 ASSERT(kShortSize == 2);
2509 // scratch1 = length * 2 + kObjectAlignmentMask.
2510 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
2511 and_(scratch1, Immediate(~kObjectAlignmentMask));
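  // scratch1 now holds the size of the character data (2 * length) rounded up
  // to object alignment. The header size is added by AllocateInNewSpace below.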
2512
2513 // Allocate two byte string in new space.
2514 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2515 times_1,
2516 scratch1,
2517 result,
2518 scratch2,
2519 scratch3,
2520 gc_required,
2521 TAG_OBJECT);
2522
2523 // Set the map, length and hash field.
2524 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2525 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2526 movl(FieldOperand(result, String::kLengthOffset), length);
2527 movl(FieldOperand(result, String::kHashFieldOffset),
2528 Immediate(String::kEmptyHashField));
2529}
2530
2531
2532void MacroAssembler::AllocateAsciiString(Register result,
2533 Register length,
2534 Register scratch1,
2535 Register scratch2,
2536 Register scratch3,
2537 Label* gc_required) {
2538 // Calculate the number of bytes needed for the characters in the string while
2539 // observing object alignment.
2540 ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
2541 movl(scratch1, length);
2542 ASSERT(kCharSize == 1);
2543 addq(scratch1, Immediate(kObjectAlignmentMask));
2544 and_(scratch1, Immediate(~kObjectAlignmentMask));
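  // scratch1 now holds the character data size (one byte per character)
  // rounded up to object alignment; the header is added by AllocateInNewSpace.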
2545
2546 // Allocate ascii string in new space.
2547 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2548 times_1,
2549 scratch1,
2550 result,
2551 scratch2,
2552 scratch3,
2553 gc_required,
2554 TAG_OBJECT);
2555
2556 // Set the map, length and hash field.
2557 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2558 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2559 movl(FieldOperand(result, String::kLengthOffset), length);
2560 movl(FieldOperand(result, String::kHashFieldOffset),
2561 Immediate(String::kEmptyHashField));
2562}
2563
2564
2565void MacroAssembler::AllocateConsString(Register result,
2566 Register scratch1,
2567 Register scratch2,
2568 Label* gc_required) {
2569 // Allocate the cons string object in new space.
2570 AllocateInNewSpace(ConsString::kSize,
2571 result,
2572 scratch1,
2573 scratch2,
2574 gc_required,
2575 TAG_OBJECT);
2576
2577 // Set the map. The other fields are left uninitialized.
2578 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2579 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2580}
2581
2582
2583void MacroAssembler::AllocateAsciiConsString(Register result,
2584 Register scratch1,
2585 Register scratch2,
2586 Label* gc_required) {
2587 // Allocate the ascii cons string object in new space.
2588 AllocateInNewSpace(ConsString::kSize,
2589 result,
2590 scratch1,
2591 scratch2,
2592 gc_required,
2593 TAG_OBJECT);
2594
2595 // Set the map. The other fields are left uninitialized.
2596 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2597 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2598}
2599
2600
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002601void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2602 if (context_chain_length > 0) {
2603 // Move up the chain of contexts to the context containing the slot.
2604 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2605 // Load the function context (which is the incoming, outer context).
lrn@chromium.orgd5649e32010-01-19 13:36:12 +00002606 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002607 for (int i = 1; i < context_chain_length; i++) {
2608 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2609 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2610 }
2611 // The context may be an intermediate context, not a function context.
2612 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2613 } else { // context is the current function context.
2614 // The context may be an intermediate context, not a function context.
2615 movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2616 }
2617}
2618
sgjesse@chromium.orgb302e562010-02-03 11:26:59 +00002619int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2620 // On Windows stack slots are reserved by the caller for all arguments
2621 // including the ones passed in registers. On Linux 6 arguments are passed in
2622 // registers and the caller does not reserve stack slots for them.
2623 ASSERT(num_arguments >= 0);
2624#ifdef _WIN64
2625 static const int kArgumentsWithoutStackSlot = 0;
2626#else
2627 static const int kArgumentsWithoutStackSlot = 6;
2628#endif
2629 return num_arguments > kArgumentsWithoutStackSlot ?
2630 num_arguments - kArgumentsWithoutStackSlot : 0;
2631}
2632
2633void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2634 int frame_alignment = OS::ActivationFrameAlignment();
2635 ASSERT(frame_alignment != 0);
2636 ASSERT(num_arguments >= 0);
2637 // Make stack end at alignment and allocate space for arguments and old rsp.
2638 movq(kScratchRegister, rsp);
2639 ASSERT(IsPowerOf2(frame_alignment));
2640 int argument_slots_on_stack =
2641 ArgumentStackSlotsForCFunctionCall(num_arguments);
2642 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2643 and_(rsp, Immediate(-frame_alignment));
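  // Store the original rsp in the slot just above the argument area so that
  // CallCFunction can restore it after the call returns.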
2644 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2645}
2646
2647
2648void MacroAssembler::CallCFunction(ExternalReference function,
2649 int num_arguments) {
2650 movq(rax, function);
2651 CallCFunction(rax, num_arguments);
2652}
2653
2654
2655void MacroAssembler::CallCFunction(Register function, int num_arguments) {
ricow@chromium.orgc9c80822010-04-21 08:22:37 +00002656 // Check stack alignment.
2657 if (FLAG_debug_code) {
2658 CheckStackAlignment();
2659 }
2660
sgjesse@chromium.orgb302e562010-02-03 11:26:59 +00002661 call(function);
2662 ASSERT(OS::ActivationFrameAlignment() != 0);
2663 ASSERT(num_arguments >= 0);
2664 int argument_slots_on_stack =
2665 ArgumentStackSlotsForCFunctionCall(num_arguments);
2666 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2667}
2668
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002669
ager@chromium.org4af710e2009-09-15 12:20:11 +00002670CodePatcher::CodePatcher(byte* address, int size)
2671 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2672 // Create a new macro assembler pointing to the address of the code to patch.
2673 // The size is adjusted with kGap in order for the assembler to generate size
2674 // bytes of instructions without failing with buffer size constraints.
2675 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2676}
2677
2678
2679CodePatcher::~CodePatcher() {
2680 // Indicate that code has changed.
2681 CPU::FlushICache(address_, size_);
2682
2683 // Check that the code was patched as expected.
2684 ASSERT(masm_.pc_ == address_ + size_);
2685 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2686}
2687
kasperl@chromium.org71affb52009-05-26 05:44:31 +00002688} } // namespace v8::internal