// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(r13, index << kPointerSizeLog2));
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(r13, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(r13, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


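// Shared tail of the write barrier: computes the remembered-set bit for
// 'addr' within the page containing 'object' and sets it with bts. When the
// bit offset falls outside the normal remembered-set range, the code below
// redirects it into the extra remembered set that follows a large FixedArray.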
static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  ASSERT(is_int32(~Page::kPageAlignmentMask));
  masm->and_(object,
             Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask)));
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  masm->subq(addr, page_start);
  masm->shr(addr, Immediate(kPointerSizeLog2));
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
  masm->j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Load the array length into 'scratch'.
  masm->movl(scratch,
             Operand(page_start,
                     Page::kObjectStartOffset + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray), at
  //     page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  masm->lea(page_start,
            Operand(page_start, array_length, times_pointer_size,
                    Page::kObjectStartOffset + FixedArray::kHeaderSize
                    - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions
  masm->bind(&fast);
  masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits : public BitField<uint32_t, 0, 4> {};
  class AddressBits : public BitField<uint32_t, 4, 4> {};
  class ObjectBits : public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the smi_index register contains the array index into
// the elements array represented as a smi. Otherwise it can be used as a
// scratch register.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register smi_index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !smi_index.is(rsi));

  // First, check if a remembered set write is even needed. The tests below
  // catch stores of smis and stores into the young generation (which does not
  // have space for the remembered set bits).
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, smi_index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register smi_index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);
  }

  // Test that the object address is not in the new space. We cannot
  // set remembered set bits in the new space.
  movq(scratch, object);
  ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
  and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
  movq(kScratchRegister, ExternalReference::new_space_start());
  cmpq(scratch, kScratchRegister);
  j(equal, &done);

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    lea(scratch, Operand(object, offset));
    ASSERT(is_int32(Page::kPageAlignmentMask));
    and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
    shr(scratch, Immediate(kObjectAlignmentBits));

    // Compute the page address from the heap object pointer, leave it in
    // 'object' (immediate value is sign extended).
    and_(object, Immediate(~Page::kPageAlignmentMask));

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions
    bts(Operand(object, Page::kRSetOffset), scratch);
  } else {
    Register dst = smi_index;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric.
      SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
      lea(dst, Operand(object,
                       index.reg,
                       index.scale,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, scratch);
    } else {
      RecordWriteStub stub(object, dst, scratch);
      CallStub(&stub);
    }
  }

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


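// Jumps to 'then_label' when 'result' is zero and 'op' is negative, i.e. when
// the exact result of the preceding operation (typically a multiplication)
// would have been -0, which has no integer representation and must be handled
// on a slower path.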
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  movq(rax, Immediate(num_arguments));
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(rdi, FieldOperand(rdi, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  movq(rdi, FieldOperand(rdi, builtins_offset));
  // Load the code entry point from the function into the target register.
  movq(target, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  movq(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
  addq(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xor_(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (x == 0) {
    xor_(kScratchRegister, kScratchRegister);
    movq(dst, kScratchRegister);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

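// The smi layout assumed below is the x64 "long smi": the 32-bit value lives
// in the upper half of the 64-bit word, and the lower half holds only the tag
// and padding, which are zero. kSmiShift is therefore expected to be 32,
// which is what lets Integer32ToSmi tag a value with a single shift and no
// overflow check.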
static int kSmiShift = kSmiTagSize + kSmiShiftSize;

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (src->value() == 0) {
    // Zero is the only smi whose 64-bit encoding fits in a 32-bit immediate.
    cmpq(dst, Immediate(0));
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


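// CheckPositiveSmi relies on a rotate-left-by-one: the sign bit ends up in
// bit 0 and the smi tag bit in bit 1, so testing the low two bits against
// 0x03 verifies "is a smi and is non-negative" with a single test.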
Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  andl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  cmpq(kScratchRegister, Immediate(1));
  return equal;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testq(src, Immediate(0x80000000));
  return zero;
}


void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    addq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    subq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  Move(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    addq(dst, src);
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
    Label result_ok;
    j(no_overflow, &result_ok);
    subq(dst, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&result_ok);
  } else {
    Move(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
  } else {
    // Subtract by adding the negative, to do it in two operations.
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
    } else {
      Move(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
    Label sub_success;
    j(no_overflow, &sub_success);
    addq(src, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&sub_success);
  } else {
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
      j(overflow, on_not_smi_result);
    } else {
      Move(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    and_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    or_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    xor_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
                                                  Register src,
                                                  int shift_value,
                                                  Label* on_not_smi_result) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value,
                                          Label* on_not_smi_result) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smi_result) {
  ASSERT(!dst.is(rcx));
  Label result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // The shift amount is taken from the lower 5 bits only, not six as the
  // 64-bit shl opcode would use.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


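// The logical-shift-right below works on the whole 64-bit word: or-ing
// kSmiShift (expected to be 32) into the count makes shr_cl shift by
// 32 + (count & 0x1f), which untags the value and applies the requested shift
// in one instruction; the result is then retagged with shl.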
void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  Label result_ok;
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift is rcx modulo 0x1f + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


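// Branch-free select of the non-smi operand: once the case where neither
// operand is a smi has been ruled out (it jumps to on_not_smis), exactly one
// operand is a smi, and a mask derived from src1's tag bit picks the other
// one into dst.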
void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // The operands must not both be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, then neither operand is a smi.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then the scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}

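// SmiToIndex converts a smi to an integer scaled by 2^shift and returns it as
// a SmiIndex (register plus scale factor) for use in an Operand, e.g. when
// indexing into a FixedArray. Because the value already sits kSmiShift bits
// up, the conversion is a single shift.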
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001280SmiIndex MacroAssembler::SmiToIndex(Register dst,
1281 Register src,
1282 int shift) {
ager@chromium.org4af710e2009-09-15 12:20:11 +00001283 ASSERT(is_uint6(shift));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001284 // There is a possible optimization if shift is in the range 60-63, but that
1285 // will (and must) never happen.
1286 if (!dst.is(src)) {
1287 movq(dst, src);
ager@chromium.org4af710e2009-09-15 12:20:11 +00001288 }
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001289 if (shift < kSmiShift) {
1290 sar(dst, Immediate(kSmiShift - shift));
1291 } else {
1292 shl(dst, Immediate(shift - kSmiShift));
ager@chromium.org4af710e2009-09-15 12:20:11 +00001293 }
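  // Explanatory note (added, not in the original source): a smi stores its
  // 32-bit value shifted left by kSmiShift, so shifting right by
  // (kSmiShift - shift) or left by (shift - kSmiShift) leaves dst holding
  // value << shift, ready to be used with the times_1 scale returned below.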
ager@chromium.org4af710e2009-09-15 12:20:11 +00001294 return SmiIndex(dst, times_1);
1295}
1296
ager@chromium.org4af710e2009-09-15 12:20:11 +00001297SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1298 Register src,
1299 int shift) {
1300 // Register src holds a positive smi.
1301 ASSERT(is_uint6(shift));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001302 if (!dst.is(src)) {
1303 movq(dst, src);
ager@chromium.org4af710e2009-09-15 12:20:11 +00001304 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001305 neg(dst);
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001306 if (shift < kSmiShift) {
1307 sar(dst, Immediate(kSmiShift - shift));
1308 } else {
1309 shl(dst, Immediate(shift - kSmiShift));
1310 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001311 return SmiIndex(dst, times_1);
1312}
1313
1314
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001315void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
1316 ASSERT_EQ(0, kSmiTag);
1317 Condition smi = CheckSmi(src);
1318 j(smi, on_smi);
1319}
1320
1321
1322void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
1323 Condition smi = CheckSmi(src);
1324 j(NegateCondition(smi), on_not_smi);
1325}
1326
1327
1328void MacroAssembler::JumpIfNotPositiveSmi(Register src,
1329 Label* on_not_positive_smi) {
1330 Condition positive_smi = CheckPositiveSmi(src);
1331 j(NegateCondition(positive_smi), on_not_positive_smi);
1332}
1333
1334
1335void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1336 Smi* constant,
1337 Label* on_equals) {
1338 SmiCompare(src, constant);
1339 j(equal, on_equals);
1340}
1341
1342
1343void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
1344 Condition is_valid = CheckInteger32ValidSmiValue(src);
1345 j(NegateCondition(is_valid), on_invalid);
1346}
1347
1348
ager@chromium.org3811b432009-10-28 14:53:37 +00001349void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1350 Label* on_invalid) {
1351 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1352 j(NegateCondition(is_valid), on_invalid);
1353}
1354
1355
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001356void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
1357 Label* on_not_both_smi) {
1358 Condition both_smi = CheckBothSmi(src1, src2);
1359 j(NegateCondition(both_smi), on_not_both_smi);
1360}
ager@chromium.org4af710e2009-09-15 12:20:11 +00001361
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001362
sgjesse@chromium.orgb302e562010-02-03 11:26:59 +00001363void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
1364 Label* on_not_both_smi) {
1365 Condition both_smi = CheckBothPositiveSmi(src1, src2);
1366 j(NegateCondition(both_smi), on_not_both_smi);
1367}
1368
1369
1370
1371void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
1372 Register second_object,
1373 Register scratch1,
1374 Register scratch2,
1375 Label* on_fail) {
1376 // Check that both objects are not smis.
1377 Condition either_smi = CheckEitherSmi(first_object, second_object);
1378 j(either_smi, on_fail);
1379
1380 // Load instance type for both strings.
1381 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
1382 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
1383 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
1384 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
1385
1386 // Check that both are flat ascii strings.
1387 ASSERT(kNotStringTag != 0);
1388 const int kFlatAsciiStringMask =
1389 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1390 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1391
1392 andl(scratch1, Immediate(kFlatAsciiStringMask));
1393 andl(scratch2, Immediate(kFlatAsciiStringMask));
1394 // Interleave the bits to check both scratch1 and scratch2 in one test.
1395 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1396 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
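  // Explanatory note (added, not in the original source): the lea computes
  // scratch1 + (scratch2 << 3). The assert above guarantees that the mask does
  // not overlap its own 3-bit shift, so the two masked instance types occupy
  // disjoint bit ranges and one compare against
  // kFlatAsciiStringTag + (kFlatAsciiStringTag << 3) checks both strings.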
1397 cmpl(scratch1,
1398 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1399 j(not_equal, on_fail);
1400}
1401
1402
ager@chromium.orgce5e87b2010-03-10 10:24:18 +00001403void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1404 Register instance_type,
1405 Register scratch,
1406 Label *failure) {
1407 if (!scratch.is(instance_type)) {
1408 movl(scratch, instance_type);
1409 }
1410
1411 const int kFlatAsciiStringMask =
1412 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1413
1414 andl(scratch, Immediate(kFlatAsciiStringMask));
1415 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1416 j(not_equal, failure);
1417}
1418
1419
1420void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1421 Register first_object_instance_type,
1422 Register second_object_instance_type,
1423 Register scratch1,
1424 Register scratch2,
1425 Label* on_fail) {
1426 // Load instance type for both strings.
1427 movq(scratch1, first_object_instance_type);
1428 movq(scratch2, second_object_instance_type);
1429
1430 // Check that both are flat ascii strings.
1431 ASSERT(kNotStringTag != 0);
1432 const int kFlatAsciiStringMask =
1433 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1434 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1435
1436 andl(scratch1, Immediate(kFlatAsciiStringMask));
1437 andl(scratch2, Immediate(kFlatAsciiStringMask));
1438 // Interleave the bits to check both scratch1 and scratch2 in one test.
1439 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1440 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1441 cmpl(scratch1,
1442 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1443 j(not_equal, on_fail);
1444}
1445
1446
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001447void MacroAssembler::Move(Register dst, Handle<Object> source) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001448 ASSERT(!source->IsFailure());
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001449 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001450 Move(dst, Smi::cast(*source));
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001451 } else {
1452 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1453 }
1454}
1455
1456
1457void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001458 ASSERT(!source->IsFailure());
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001459 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001460 Move(dst, Smi::cast(*source));
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001461 } else {
1462 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1463 movq(dst, kScratchRegister);
1464 }
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001465}
1466
1467
1468void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001469 if (source->IsSmi()) {
1470 SmiCompare(dst, Smi::cast(*source));
1471 } else {
1472 Move(kScratchRegister, source);
1473 cmpq(dst, kScratchRegister);
1474 }
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001475}
1476
1477
ager@chromium.org3e875802009-06-29 08:26:34 +00001478void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001479 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001480 SmiCompare(dst, Smi::cast(*source));
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001481 } else {
1482 ASSERT(source->IsHeapObject());
1483 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1484 cmpq(dst, kScratchRegister);
1485 }
ager@chromium.org3e875802009-06-29 08:26:34 +00001486}
1487
1488
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001489void MacroAssembler::Push(Handle<Object> source) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001490 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001491 Push(Smi::cast(*source));
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001492 } else {
1493 ASSERT(source->IsHeapObject());
1494 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1495 push(kScratchRegister);
1496 }
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001497}
1498
1499
sgjesse@chromium.org0b6db592009-07-30 14:48:31 +00001500void MacroAssembler::Push(Smi* source) {
ager@chromium.org3811b432009-10-28 14:53:37 +00001501 intptr_t smi = reinterpret_cast<intptr_t>(source);
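  // Explanatory note (added): a Smi* is not a real pointer; its bit pattern is
  // the tagged smi value itself, so the cast recovers the value to be pushed.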
1502 if (is_int32(smi)) {
1503 push(Immediate(static_cast<int32_t>(smi)));
sgjesse@chromium.org0b6db592009-07-30 14:48:31 +00001504 } else {
ager@chromium.org3811b432009-10-28 14:53:37 +00001505 Set(kScratchRegister, smi);
1506 push(kScratchRegister);
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001507 }
1508}
1509
1510
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00001511void MacroAssembler::Drop(int stack_elements) {
1512 if (stack_elements > 0) {
1513 addq(rsp, Immediate(stack_elements * kPointerSize));
1514 }
1515}
1516
1517
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001518void MacroAssembler::Test(const Operand& src, Smi* source) {
ager@chromium.org3811b432009-10-28 14:53:37 +00001519 intptr_t smi = reinterpret_cast<intptr_t>(source);
1520 if (is_int32(smi)) {
1521 testl(src, Immediate(static_cast<int32_t>(smi)));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001522 } else {
ager@chromium.org3811b432009-10-28 14:53:37 +00001523 Move(kScratchRegister, source);
1524 testq(src, kScratchRegister);
sgjesse@chromium.org0b6db592009-07-30 14:48:31 +00001525 }
1526}
1527
1528
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001529void MacroAssembler::Jump(ExternalReference ext) {
1530 movq(kScratchRegister, ext);
1531 jmp(kScratchRegister);
1532}
1533
1534
1535void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1536 movq(kScratchRegister, destination, rmode);
1537 jmp(kScratchRegister);
1538}
1539
1540
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001541void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00001542 // TODO(X64): Inline this
1543 jmp(code_object, rmode);
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001544}
1545
1546
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001547void MacroAssembler::Call(ExternalReference ext) {
1548 movq(kScratchRegister, ext);
1549 call(kScratchRegister);
1550}
1551
1552
1553void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1554 movq(kScratchRegister, destination, rmode);
1555 call(kScratchRegister);
1556}
1557
1558
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001559void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001560 ASSERT(RelocInfo::IsCodeTarget(rmode));
sgjesse@chromium.org911335c2009-08-19 12:59:44 +00001561 WriteRecordedPositions();
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00001562 call(code_object, rmode);
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001563}
1564
1565
ager@chromium.orge2902be2009-06-08 12:21:35 +00001566void MacroAssembler::PushTryHandler(CodeLocation try_location,
1567 HandlerType type) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001568 // Adjust this code if not the case.
1569 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1570
1571 // The pc (return address) is already on TOS. This code pushes state,
1572 // frame pointer and current handler. Check that they are expected
1573 // next on the stack, in that order.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001574 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1575 StackHandlerConstants::kPCOffset - kPointerSize);
ager@chromium.orge2902be2009-06-08 12:21:35 +00001576 ASSERT_EQ(StackHandlerConstants::kFPOffset,
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001577 StackHandlerConstants::kStateOffset - kPointerSize);
1578 ASSERT_EQ(StackHandlerConstants::kNextOffset,
ager@chromium.orge2902be2009-06-08 12:21:35 +00001579 StackHandlerConstants::kFPOffset - kPointerSize);
1580
1581 if (try_location == IN_JAVASCRIPT) {
1582 if (type == TRY_CATCH_HANDLER) {
1583 push(Immediate(StackHandler::TRY_CATCH));
1584 } else {
1585 push(Immediate(StackHandler::TRY_FINALLY));
1586 }
ager@chromium.orge2902be2009-06-08 12:21:35 +00001587 push(rbp);
ager@chromium.orge2902be2009-06-08 12:21:35 +00001588 } else {
1589 ASSERT(try_location == IN_JS_ENTRY);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001590 // The frame pointer does not point to a JS frame so we save NULL
1591 // for rbp. We expect the code throwing an exception to check rbp
1592 // before dereferencing it to restore the context.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001593 push(Immediate(StackHandler::ENTRY));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001594 push(Immediate(0)); // NULL frame pointer.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001595 }
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001596 // Save the current handler.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001597 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001598 push(Operand(kScratchRegister, 0));
ager@chromium.orge2902be2009-06-08 12:21:35 +00001599 // Link this handler.
1600 movq(Operand(kScratchRegister, 0), rsp);
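  // Added sketch of the resulting layout (based on the offset asserts above,
  // not on StackHandlerConstants directly): the handler occupies four stack
  // slots, and from rsp upwards they hold the next handler, the frame pointer
  // (or NULL for JS entry), the state, and the return address.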
1601}
1602
1603
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00001604void MacroAssembler::PopTryHandler() {
1605 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1606 // Unlink this handler.
1607 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1608 pop(Operand(kScratchRegister, 0));
1609 // Remove the remaining fields.
1610 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1611}
1612
1613
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001614void MacroAssembler::Ret() {
1615 ret(0);
1616}
1617
1618
ager@chromium.org3e875802009-06-29 08:26:34 +00001619void MacroAssembler::FCmp() {
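  // Explanatory note (added): fucomip compares ST(0) with ST(1), sets the CPU
  // flags and pops ST(0); the ffree/fincstp pair then discards the remaining
  // operand, so both comparison operands are removed from the x87 stack.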
ager@chromium.org3811b432009-10-28 14:53:37 +00001620 fucomip();
1621 ffree(0);
1622 fincstp();
ager@chromium.org3e875802009-06-29 08:26:34 +00001623}
1624
1625
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001626void MacroAssembler::CmpObjectType(Register heap_object,
1627 InstanceType type,
1628 Register map) {
1629 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1630 CmpInstanceType(map, type);
1631}
1632
1633
1634void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1635 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1636 Immediate(static_cast<int8_t>(type)));
1637}
1638
1639
ager@chromium.org5c838252010-02-19 08:53:10 +00001640void MacroAssembler::CheckMap(Register obj,
1641 Handle<Map> map,
1642 Label* fail,
1643 bool is_heap_object) {
1644 if (!is_heap_object) {
1645 JumpIfSmi(obj, fail);
1646 }
1647 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1648 j(not_equal, fail);
1649}
1650
1651
1652void MacroAssembler::AbortIfNotNumber(Register object, const char* msg) {
1653 Label ok;
1654 Condition is_smi = CheckSmi(object);
1655 j(is_smi, &ok);
1656 Cmp(FieldOperand(object, HeapObject::kMapOffset),
1657 Factory::heap_number_map());
1658 Assert(equal, msg);
1659 bind(&ok);
1660}
1661
1662
lrn@chromium.org25156de2010-04-06 13:10:27 +00001663void MacroAssembler::AbortIfNotSmi(Register object, const char* msg) {
1664 Label ok;
1665 Condition is_smi = CheckSmi(object);
1666 j(is_smi, &ok);
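  // Note (added): this relies on CheckSmi leaving its result in the zero flag
  // and on 'equal' and 'zero' naming the same condition code, so the Assert
  // below aborts exactly when the value is not a smi.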
1667 Assert(equal, msg);
1668 bind(&ok);
1669}
1670
1671
sgjesse@chromium.orgb302e562010-02-03 11:26:59 +00001672Condition MacroAssembler::IsObjectStringType(Register heap_object,
1673 Register map,
1674 Register instance_type) {
1675 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1676 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
1677 ASSERT(kNotStringTag != 0);
1678 testb(instance_type, Immediate(kIsNotStringMask));
1679 return zero;
1680}
1681
1682
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00001683void MacroAssembler::TryGetFunctionPrototype(Register function,
1684 Register result,
1685 Label* miss) {
1686 // Check that the receiver isn't a smi.
1687 testl(function, Immediate(kSmiTagMask));
1688 j(zero, miss);
1689
1690 // Check that the function really is a function.
1691 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1692 j(not_equal, miss);
1693
1694 // Make sure that the function has an instance prototype.
1695 Label non_instance;
1696 testb(FieldOperand(result, Map::kBitFieldOffset),
1697 Immediate(1 << Map::kHasNonInstancePrototype));
1698 j(not_zero, &non_instance);
1699
1700 // Get the prototype or initial map from the function.
1701 movq(result,
1702 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1703
1704 // If the prototype or initial map is the hole, don't return it and
1705 // simply miss the cache instead. This will allow us to allocate a
1706 // prototype object on-demand in the runtime system.
ager@chromium.org18ad94b2009-09-02 08:22:29 +00001707 CompareRoot(result, Heap::kTheHoleValueRootIndex);
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00001708 j(equal, miss);
1709
1710 // If the function does not have an initial map, we're done.
1711 Label done;
1712 CmpObjectType(result, MAP_TYPE, kScratchRegister);
1713 j(not_equal, &done);
1714
1715 // Get the prototype from the initial map.
1716 movq(result, FieldOperand(result, Map::kPrototypeOffset));
1717 jmp(&done);
1718
1719 // Non-instance prototype: Fetch prototype from constructor field
1720 // in initial map.
1721 bind(&non_instance);
1722 movq(result, FieldOperand(result, Map::kConstructorOffset));
1723
1724 // All done.
1725 bind(&done);
1726}
1727
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001728
1729void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1730 if (FLAG_native_code_counters && counter->Enabled()) {
1731 movq(kScratchRegister, ExternalReference(counter));
1732 movl(Operand(kScratchRegister, 0), Immediate(value));
1733 }
1734}
1735
1736
1737void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1738 ASSERT(value > 0);
1739 if (FLAG_native_code_counters && counter->Enabled()) {
1740 movq(kScratchRegister, ExternalReference(counter));
1741 Operand operand(kScratchRegister, 0);
1742 if (value == 1) {
1743 incl(operand);
1744 } else {
1745 addl(operand, Immediate(value));
1746 }
1747 }
1748}
1749
1750
1751void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1752 ASSERT(value > 0);
1753 if (FLAG_native_code_counters && counter->Enabled()) {
1754 movq(kScratchRegister, ExternalReference(counter));
1755 Operand operand(kScratchRegister, 0);
1756 if (value == 1) {
1757 decl(operand);
1758 } else {
1759 subl(operand, Immediate(value));
1760 }
1761 }
1762}
1763
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001764#ifdef ENABLE_DEBUGGER_SUPPORT
1765
1766void MacroAssembler::PushRegistersFromMemory(RegList regs) {
1767 ASSERT((regs & ~kJSCallerSaved) == 0);
1768 // Push the content of the memory location to the stack.
1769 for (int i = 0; i < kNumJSCallerSaved; i++) {
1770 int r = JSCallerSavedCode(i);
1771 if ((regs & (1 << r)) != 0) {
1772 ExternalReference reg_addr =
1773 ExternalReference(Debug_Address::Register(i));
1774 movq(kScratchRegister, reg_addr);
1775 push(Operand(kScratchRegister, 0));
1776 }
1777 }
1778}
1779
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001780
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001781void MacroAssembler::SaveRegistersToMemory(RegList regs) {
1782 ASSERT((regs & ~kJSCallerSaved) == 0);
1783 // Copy the content of registers to memory location.
1784 for (int i = 0; i < kNumJSCallerSaved; i++) {
1785 int r = JSCallerSavedCode(i);
1786 if ((regs & (1 << r)) != 0) {
1787 Register reg = { r };
1788 ExternalReference reg_addr =
1789 ExternalReference(Debug_Address::Register(i));
1790 movq(kScratchRegister, reg_addr);
1791 movq(Operand(kScratchRegister, 0), reg);
1792 }
1793 }
1794}
1795
1796
1797void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
1798 ASSERT((regs & ~kJSCallerSaved) == 0);
1799 // Copy the content of memory location to registers.
1800 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1801 int r = JSCallerSavedCode(i);
1802 if ((regs & (1 << r)) != 0) {
1803 Register reg = { r };
1804 ExternalReference reg_addr =
1805 ExternalReference(Debug_Address::Register(i));
1806 movq(kScratchRegister, reg_addr);
1807 movq(reg, Operand(kScratchRegister, 0));
1808 }
1809 }
1810}
1811
1812
1813void MacroAssembler::PopRegistersToMemory(RegList regs) {
1814 ASSERT((regs & ~kJSCallerSaved) == 0);
1815 // Pop the content from the stack to the memory location.
1816 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1817 int r = JSCallerSavedCode(i);
1818 if ((regs & (1 << r)) != 0) {
1819 ExternalReference reg_addr =
1820 ExternalReference(Debug_Address::Register(i));
1821 movq(kScratchRegister, reg_addr);
1822 pop(Operand(kScratchRegister, 0));
1823 }
1824 }
1825}
1826
1827
1828void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
1829 Register scratch,
1830 RegList regs) {
1831 ASSERT(!scratch.is(kScratchRegister));
1832 ASSERT(!base.is(kScratchRegister));
1833 ASSERT(!base.is(scratch));
1834 ASSERT((regs & ~kJSCallerSaved) == 0);
1835 // Copy the content of the stack to the memory location and adjust base.
1836 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1837 int r = JSCallerSavedCode(i);
1838 if ((regs & (1 << r)) != 0) {
1839 movq(scratch, Operand(base, 0));
1840 ExternalReference reg_addr =
1841 ExternalReference(Debug_Address::Register(i));
1842 movq(kScratchRegister, reg_addr);
1843 movq(Operand(kScratchRegister, 0), scratch);
1844 lea(base, Operand(base, kPointerSize));
1845 }
1846 }
1847}
1848
ager@chromium.org5c838252010-02-19 08:53:10 +00001849void MacroAssembler::DebugBreak() {
1850 ASSERT(allow_stub_calls());
1851 xor_(rax, rax); // no arguments
1852 movq(rbx, ExternalReference(Runtime::kDebugBreak));
1853 CEntryStub ces(1);
1854 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
ager@chromium.org3e875802009-06-29 08:26:34 +00001855}
ager@chromium.org5c838252010-02-19 08:53:10 +00001856#endif // ENABLE_DEBUGGER_SUPPORT
ager@chromium.org3e875802009-06-29 08:26:34 +00001857
1858
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001859void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1860 const ParameterCount& actual,
1861 Handle<Code> code_constant,
1862 Register code_register,
1863 Label* done,
1864 InvokeFlag flag) {
1865 bool definitely_matches = false;
1866 Label invoke;
1867 if (expected.is_immediate()) {
1868 ASSERT(actual.is_immediate());
1869 if (expected.immediate() == actual.immediate()) {
1870 definitely_matches = true;
1871 } else {
1872 movq(rax, Immediate(actual.immediate()));
1873 if (expected.immediate() ==
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001874 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001875 // Don't worry about adapting arguments for built-ins that
1876        // don't want that done. Skip the adaptation code by making it look
1877 // like we have a match between expected and actual number of
1878 // arguments.
1879 definitely_matches = true;
1880 } else {
1881 movq(rbx, Immediate(expected.immediate()));
1882 }
1883 }
1884 } else {
1885 if (actual.is_immediate()) {
1886 // Expected is in register, actual is immediate. This is the
1887 // case when we invoke function values without going through the
1888 // IC mechanism.
1889 cmpq(expected.reg(), Immediate(actual.immediate()));
1890 j(equal, &invoke);
1891 ASSERT(expected.reg().is(rbx));
1892 movq(rax, Immediate(actual.immediate()));
1893 } else if (!expected.reg().is(actual.reg())) {
1894 // Both expected and actual are in (different) registers. This
1895 // is the case when we invoke functions using call and apply.
1896 cmpq(expected.reg(), actual.reg());
1897 j(equal, &invoke);
1898 ASSERT(actual.reg().is(rax));
1899 ASSERT(expected.reg().is(rbx));
1900 }
1901 }
1902
1903 if (!definitely_matches) {
1904 Handle<Code> adaptor =
1905 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
1906 if (!code_constant.is_null()) {
1907 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
1908 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1909 } else if (!code_register.is(rdx)) {
1910 movq(rdx, code_register);
1911 }
1912
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001913 if (flag == CALL_FUNCTION) {
sgjesse@chromium.org911335c2009-08-19 12:59:44 +00001914 Call(adaptor, RelocInfo::CODE_TARGET);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001915 jmp(done);
1916 } else {
sgjesse@chromium.org911335c2009-08-19 12:59:44 +00001917 Jump(adaptor, RelocInfo::CODE_TARGET);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001918 }
1919 bind(&invoke);
1920 }
1921}
1922
1923
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001924void MacroAssembler::InvokeCode(Register code,
1925 const ParameterCount& expected,
1926 const ParameterCount& actual,
1927 InvokeFlag flag) {
1928 Label done;
1929 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1930 if (flag == CALL_FUNCTION) {
1931 call(code);
1932 } else {
1933 ASSERT(flag == JUMP_FUNCTION);
1934 jmp(code);
1935 }
1936 bind(&done);
1937}
1938
1939
1940void MacroAssembler::InvokeCode(Handle<Code> code,
1941 const ParameterCount& expected,
1942 const ParameterCount& actual,
1943 RelocInfo::Mode rmode,
1944 InvokeFlag flag) {
1945 Label done;
1946 Register dummy = rax;
1947 InvokePrologue(expected, actual, code, dummy, &done, flag);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001948 if (flag == CALL_FUNCTION) {
ager@chromium.org3e875802009-06-29 08:26:34 +00001949 Call(code, rmode);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001950 } else {
1951 ASSERT(flag == JUMP_FUNCTION);
ager@chromium.org3e875802009-06-29 08:26:34 +00001952 Jump(code, rmode);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001953 }
1954 bind(&done);
1955}
1956
1957
1958void MacroAssembler::InvokeFunction(Register function,
1959 const ParameterCount& actual,
1960 InvokeFlag flag) {
1961 ASSERT(function.is(rdi));
1962 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1963 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
ager@chromium.org3e875802009-06-29 08:26:34 +00001964 movsxlq(rbx,
1965 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001966 movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001967 // Advances rdx to the end of the Code object header, to the start of
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001968 // the executable code.
1969 lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
1970
1971 ParameterCount expected(rbx);
1972 InvokeCode(rdx, expected, actual, flag);
1973}
1974
1975
ager@chromium.org5c838252010-02-19 08:53:10 +00001976void MacroAssembler::InvokeFunction(JSFunction* function,
1977 const ParameterCount& actual,
1978 InvokeFlag flag) {
1979 ASSERT(function->is_compiled());
1980 // Get the function and setup the context.
1981 Move(rdi, Handle<JSFunction>(function));
1982 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1983
1984 // Invoke the cached code.
1985 Handle<Code> code(function->code());
1986 ParameterCount expected(function->shared()->formal_parameter_count());
1987 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
1988}
1989
1990
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001991void MacroAssembler::EnterFrame(StackFrame::Type type) {
1992 push(rbp);
1993 movq(rbp, rsp);
1994 push(rsi); // Context.
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001995 Push(Smi::FromInt(type));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001996 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
1997 push(kScratchRegister);
1998 if (FLAG_debug_code) {
1999 movq(kScratchRegister,
2000 Factory::undefined_value(),
2001 RelocInfo::EMBEDDED_OBJECT);
2002 cmpq(Operand(rsp, 0), kScratchRegister);
2003 Check(not_equal, "code object not properly patched");
2004 }
2005}
2006
2007
2008void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2009 if (FLAG_debug_code) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002010 Move(kScratchRegister, Smi::FromInt(type));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002011 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2012 Check(equal, "stack frame types must match");
2013 }
2014 movq(rsp, rbp);
2015 pop(rbp);
2016}
2017
2018
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002019void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002020 // Setup the frame structure on the stack.
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00002021 // All constants are relative to the frame pointer of the exit frame.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002022 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2023 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2024 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2025 push(rbp);
2026 movq(rbp, rsp);
2027
2028  // Reserve room for the saved entry stack pointer and push the code object.
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002029 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
ager@chromium.org5c838252010-02-19 08:53:10 +00002030 push(Immediate(0)); // Saved entry sp, patched before call.
2031 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2032  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002033
2034 // Save the frame pointer and the context in top.
2035 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2036 ExternalReference context_address(Top::k_context_address);
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00002037 movq(r14, rax); // Backup rax before we use it.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002038
2039 movq(rax, rbp);
2040 store_rax(c_entry_fp_address);
2041 movq(rax, rsi);
2042 store_rax(context_address);
2043
2044 // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
2045 // so it must be retained across the C-call.
2046 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00002047 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002048
2049#ifdef ENABLE_DEBUGGER_SUPPORT
2050 // Save the state of all registers to the stack from the memory
2051 // location. This is needed to allow nested break points.
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002052 if (mode == ExitFrame::MODE_DEBUG) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002053 // TODO(1243899): This should be symmetric to
2054 // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
2055 // correct here, but computed for the other call. Very error
2056 // prone! FIX THIS. Actually there are deeper problems with
2057 // register saving than this asymmetry (see the bug report
2058 // associated with this issue).
2059 PushRegistersFromMemory(kJSCallerSaved);
2060 }
2061#endif
2062
ager@chromium.orga1645e22009-09-09 19:27:10 +00002063#ifdef _WIN64
2064 // Reserve space on stack for result and argument structures, if necessary.
2065 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
2066 // Reserve space for the Arguments object. The Windows 64-bit ABI
2067 // requires us to pass this structure as a pointer to its location on
2068 // the stack. The structure contains 2 values.
2069 int argument_stack_space = 2 * kPointerSize;
2070 // We also need backing space for 4 parameters, even though
2071  // we only pass one or two parameters, and they are passed in registers.
2072 int argument_mirror_space = 4 * kPointerSize;
2073 int total_stack_space =
2074 argument_mirror_space + argument_stack_space + result_stack_space;
2075 subq(rsp, Immediate(total_stack_space));
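  // Illustrative arithmetic (added, not in the original source): with
  // result_size == 2 this reserves 4 * 8 (register parameter mirror space)
  // + 2 * 8 (Arguments object) + 2 * 8 (result) = 64 bytes; with
  // result_size <= 1 no result space is needed and only 48 bytes are reserved.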
2076#endif
2077
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002078 // Get the required frame alignment for the OS.
2079 static const int kFrameAlignment = OS::ActivationFrameAlignment();
2080 if (kFrameAlignment > 0) {
2081 ASSERT(IsPowerOf2(kFrameAlignment));
2082 movq(kScratchRegister, Immediate(-kFrameAlignment));
2083 and_(rsp, kScratchRegister);
2084 }
2085
2086 // Patch the saved entry sp.
2087 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2088}
2089
2090
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002091void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002092 // Registers:
2093 // r15 : argv
2094#ifdef ENABLE_DEBUGGER_SUPPORT
2095 // Restore the memory copy of the registers by digging them out from
2096 // the stack. This is needed to allow nested break points.
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002097 if (mode == ExitFrame::MODE_DEBUG) {
ager@chromium.orga1645e22009-09-09 19:27:10 +00002098 // It's okay to clobber register rbx below because we don't need
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002099 // the function pointer after this.
2100 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002101 int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002102 lea(rbx, Operand(rbp, kOffset));
2103 CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
2104 }
2105#endif
2106
2107 // Get the return address from the stack and restore the frame pointer.
2108 movq(rcx, Operand(rbp, 1 * kPointerSize));
2109 movq(rbp, Operand(rbp, 0 * kPointerSize));
2110
ager@chromium.orga1645e22009-09-09 19:27:10 +00002111 // Pop everything up to and including the arguments and the receiver
2112 // from the caller stack.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002113 lea(rsp, Operand(r15, 1 * kPointerSize));
2114
2115 // Restore current context from top and clear it in debug mode.
2116 ExternalReference context_address(Top::k_context_address);
2117 movq(kScratchRegister, context_address);
2118 movq(rsi, Operand(kScratchRegister, 0));
2119#ifdef DEBUG
2120 movq(Operand(kScratchRegister, 0), Immediate(0));
2121#endif
2122
2123 // Push the return address to get ready to return.
2124 push(rcx);
2125
2126 // Clear the top frame.
2127 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2128 movq(kScratchRegister, c_entry_fp_address);
2129 movq(Operand(kScratchRegister, 0), Immediate(0));
2130}
2131
2132
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002133Register MacroAssembler::CheckMaps(JSObject* object,
2134 Register object_reg,
2135 JSObject* holder,
2136 Register holder_reg,
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002137 Register scratch,
2138 Label* miss) {
2139 // Make sure there's no overlap between scratch and the other
2140 // registers.
2141 ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));
2142
2143 // Keep track of the current object in register reg. On the first
2144 // iteration, reg is an alias for object_reg, on later iterations,
2145 // it is an alias for holder_reg.
2146 Register reg = object_reg;
2147 int depth = 1;
2148
2149 // Check the maps in the prototype chain.
2150 // Traverse the prototype chain from the object and do map checks.
2151 while (object != holder) {
2152 depth++;
2153
2154 // Only global objects and objects that do not require access
2155 // checks are allowed in stubs.
2156 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2157
2158 JSObject* prototype = JSObject::cast(object->GetPrototype());
2159 if (Heap::InNewSpace(prototype)) {
2160 // Get the map of the current object.
2161 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2162 Cmp(scratch, Handle<Map>(object->map()));
2163 // Branch on the result of the map check.
2164 j(not_equal, miss);
2165 // Check access rights to the global object. This has to happen
2166 // after the map check so that we know that the object is
2167 // actually a global object.
2168 if (object->IsJSGlobalProxy()) {
2169 CheckAccessGlobalProxy(reg, scratch, miss);
2170
2171 // Restore scratch register to be the map of the object.
2172 // We load the prototype from the map in the scratch register.
2173 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2174 }
2175 // The prototype is in new space; we cannot store a reference
2176 // to it in the code. Load it from the map.
2177 reg = holder_reg; // from now the object is in holder_reg
2178 movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));
2179
2180 } else {
2181 // Check the map of the current object.
2182 Cmp(FieldOperand(reg, HeapObject::kMapOffset),
2183 Handle<Map>(object->map()));
2184 // Branch on the result of the map check.
2185 j(not_equal, miss);
2186 // Check access rights to the global object. This has to happen
2187 // after the map check so that we know that the object is
2188 // actually a global object.
2189 if (object->IsJSGlobalProxy()) {
2190 CheckAccessGlobalProxy(reg, scratch, miss);
2191 }
2192 // The prototype is in old space; load it directly.
2193 reg = holder_reg; // from now the object is in holder_reg
2194 Move(reg, Handle<JSObject>(prototype));
2195 }
2196
2197 // Go to the next object in the prototype chain.
2198 object = prototype;
2199 }
2200
2201 // Check the holder map.
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002202 Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002203 j(not_equal, miss);
2204
2205 // Log the check depth.
2206 LOG(IntEvent("check-maps-depth", depth));
2207
2208 // Perform security check for access to the global object and return
2209 // the holder register.
2210 ASSERT(object == holder);
2211 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2212 if (object->IsJSGlobalProxy()) {
2213 CheckAccessGlobalProxy(reg, scratch, miss);
2214 }
2215 return reg;
2216}
2217
2218
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002219void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2220 Register scratch,
2221 Label* miss) {
2222 Label same_contexts;
2223
2224 ASSERT(!holder_reg.is(scratch));
2225 ASSERT(!scratch.is(kScratchRegister));
2226 // Load current lexical context from the stack frame.
2227 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2228
2229 // When generating debug code, make sure the lexical context is set.
2230 if (FLAG_debug_code) {
2231 cmpq(scratch, Immediate(0));
2232 Check(not_equal, "we should not have an empty lexical context");
2233 }
2234 // Load the global context of the current context.
2235 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2236 movq(scratch, FieldOperand(scratch, offset));
2237 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2238
2239 // Check the context is a global context.
2240 if (FLAG_debug_code) {
2241 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2242 Factory::global_context_map());
2243 Check(equal, "JSGlobalObject::global_context should be a global context.");
2244 }
2245
2246 // Check if both contexts are the same.
2247 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2248 j(equal, &same_contexts);
2249
2250 // Compare security tokens.
2251 // Check that the security token in the calling global object is
2252 // compatible with the security token in the receiving global
2253 // object.
2254
2255 // Check the context is a global context.
2256 if (FLAG_debug_code) {
2257 // Preserve original value of holder_reg.
2258 push(holder_reg);
2259 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002260 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002261 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2262
2263 // Read the first word and compare to global_context_map(),
2264    // Read the first word and compare to global_context_map().
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002265 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002266 Check(equal, "JSGlobalObject::global_context should be a global context.");
2267 pop(holder_reg);
2268 }
2269
2270 movq(kScratchRegister,
2271 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002272 int token_offset =
2273 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002274 movq(scratch, FieldOperand(scratch, token_offset));
2275 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2276 j(not_equal, miss);
2277
2278 bind(&same_contexts);
2279}
2280
2281
ager@chromium.orga1645e22009-09-09 19:27:10 +00002282void MacroAssembler::LoadAllocationTopHelper(Register result,
2283 Register result_end,
2284 Register scratch,
2285 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002286 ExternalReference new_space_allocation_top =
2287 ExternalReference::new_space_allocation_top_address();
2288
2289 // Just return if allocation top is already known.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002290 if ((flags & RESULT_CONTAINS_TOP) != 0) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002291 // No use of scratch if allocation top is provided.
2292 ASSERT(scratch.is(no_reg));
ager@chromium.orga1645e22009-09-09 19:27:10 +00002293#ifdef DEBUG
2294 // Assert that result actually contains top on entry.
2295 movq(kScratchRegister, new_space_allocation_top);
2296 cmpq(result, Operand(kScratchRegister, 0));
2297 Check(equal, "Unexpected allocation top");
2298#endif
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002299 return;
2300 }
2301
2302 // Move address of new object to result. Use scratch register if available.
2303 if (scratch.is(no_reg)) {
2304 movq(kScratchRegister, new_space_allocation_top);
2305 movq(result, Operand(kScratchRegister, 0));
2306 } else {
2307 ASSERT(!scratch.is(result_end));
2308 movq(scratch, new_space_allocation_top);
2309 movq(result, Operand(scratch, 0));
2310 }
2311}
2312
2313
2314void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2315 Register scratch) {
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002316 if (FLAG_debug_code) {
2317 testq(result_end, Immediate(kObjectAlignmentMask));
2318 Check(zero, "Unaligned allocation in new space");
2319 }
2320
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002321 ExternalReference new_space_allocation_top =
2322 ExternalReference::new_space_allocation_top_address();
2323
2324 // Update new top.
2325 if (result_end.is(rax)) {
2326 // rax can be stored directly to a memory location.
2327 store_rax(new_space_allocation_top);
2328 } else {
2329 // Register required - use scratch provided if available.
2330 if (scratch.is(no_reg)) {
2331 movq(kScratchRegister, new_space_allocation_top);
2332 movq(Operand(kScratchRegister, 0), result_end);
2333 } else {
2334 movq(Operand(scratch, 0), result_end);
2335 }
2336 }
2337}
2338
2339
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00002340void MacroAssembler::AllocateInNewSpace(int object_size,
2341 Register result,
2342 Register result_end,
2343 Register scratch,
2344 Label* gc_required,
2345 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002346 ASSERT(!result.is(result_end));
2347
2348 // Load address of new object into result.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002349 LoadAllocationTopHelper(result, result_end, scratch, flags);
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002350
2351 // Calculate new top and bail out if new space is exhausted.
2352 ExternalReference new_space_allocation_limit =
2353 ExternalReference::new_space_allocation_limit_address();
2354 lea(result_end, Operand(result, object_size));
2355 movq(kScratchRegister, new_space_allocation_limit);
2356 cmpq(result_end, Operand(kScratchRegister, 0));
2357 j(above, gc_required);
2358
2359 // Update allocation top.
2360 UpdateAllocationTopHelper(result_end, scratch);
ager@chromium.orga1645e22009-09-09 19:27:10 +00002361
2362 // Tag the result if requested.
2363 if ((flags & TAG_OBJECT) != 0) {
2364 addq(result, Immediate(kHeapObjectTag));
2365 }
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002366}
2367
2368
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00002369void MacroAssembler::AllocateInNewSpace(int header_size,
2370 ScaleFactor element_size,
2371 Register element_count,
2372 Register result,
2373 Register result_end,
2374 Register scratch,
2375 Label* gc_required,
2376 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002377 ASSERT(!result.is(result_end));
2378
2379 // Load address of new object into result.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002380 LoadAllocationTopHelper(result, result_end, scratch, flags);
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002381
2382 // Calculate new top and bail out if new space is exhausted.
2383 ExternalReference new_space_allocation_limit =
2384 ExternalReference::new_space_allocation_limit_address();
2385 lea(result_end, Operand(result, element_count, element_size, header_size));
2386 movq(kScratchRegister, new_space_allocation_limit);
2387 cmpq(result_end, Operand(kScratchRegister, 0));
2388 j(above, gc_required);
2389
2390 // Update allocation top.
2391 UpdateAllocationTopHelper(result_end, scratch);
ager@chromium.orga1645e22009-09-09 19:27:10 +00002392
2393 // Tag the result if requested.
2394 if ((flags & TAG_OBJECT) != 0) {
2395 addq(result, Immediate(kHeapObjectTag));
2396 }
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002397}
2398
2399
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00002400void MacroAssembler::AllocateInNewSpace(Register object_size,
2401 Register result,
2402 Register result_end,
2403 Register scratch,
2404 Label* gc_required,
2405 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002406 // Load address of new object into result.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002407 LoadAllocationTopHelper(result, result_end, scratch, flags);
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002408
2409 // Calculate new top and bail out if new space is exhausted.
2410 ExternalReference new_space_allocation_limit =
2411 ExternalReference::new_space_allocation_limit_address();
2412 if (!object_size.is(result_end)) {
2413 movq(result_end, object_size);
2414 }
2415 addq(result_end, result);
2416 movq(kScratchRegister, new_space_allocation_limit);
2417 cmpq(result_end, Operand(kScratchRegister, 0));
2418 j(above, gc_required);
2419
2420 // Update allocation top.
2421 UpdateAllocationTopHelper(result_end, scratch);
ager@chromium.orga1645e22009-09-09 19:27:10 +00002422
2423 // Tag the result if requested.
2424 if ((flags & TAG_OBJECT) != 0) {
2425 addq(result, Immediate(kHeapObjectTag));
2426 }
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002427}
2428
2429
2430void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2431 ExternalReference new_space_allocation_top =
2432 ExternalReference::new_space_allocation_top_address();
2433
2434 // Make sure the object has no tag before resetting top.
2435 and_(object, Immediate(~kHeapObjectTagMask));
2436 movq(kScratchRegister, new_space_allocation_top);
2437#ifdef DEBUG
2438 cmpq(object, Operand(kScratchRegister, 0));
2439 Check(below, "Undo allocation of non allocated memory");
2440#endif
2441 movq(Operand(kScratchRegister, 0), object);
2442}
2443
2444
ager@chromium.org3811b432009-10-28 14:53:37 +00002445void MacroAssembler::AllocateHeapNumber(Register result,
2446 Register scratch,
2447 Label* gc_required) {
2448 // Allocate heap number in new space.
2449 AllocateInNewSpace(HeapNumber::kSize,
2450 result,
2451 scratch,
2452 no_reg,
2453 gc_required,
2454 TAG_OBJECT);
2455
2456 // Set the map.
2457 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2458 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2459}
2460
2461
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00002462void MacroAssembler::AllocateTwoByteString(Register result,
2463 Register length,
2464 Register scratch1,
2465 Register scratch2,
2466 Register scratch3,
2467 Label* gc_required) {
2468 // Calculate the number of bytes needed for the characters in the string while
2469 // observing object alignment.
2470 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2471 ASSERT(kShortSize == 2);
2472 // scratch1 = length * 2 + kObjectAlignmentMask.
2473 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
2474 and_(scratch1, Immediate(~kObjectAlignmentMask));
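  // Explanatory note (added): adding kObjectAlignmentMask and then masking it
  // off rounds 2 * length up to the next object-alignment boundary, the usual
  // align-up idiom: aligned = (n + mask) & ~mask.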
2475
2476 // Allocate two byte string in new space.
2477 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2478 times_1,
2479 scratch1,
2480 result,
2481 scratch2,
2482 scratch3,
2483 gc_required,
2484 TAG_OBJECT);
2485
2486 // Set the map, length and hash field.
2487 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2488 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2489 movl(FieldOperand(result, String::kLengthOffset), length);
2490 movl(FieldOperand(result, String::kHashFieldOffset),
2491 Immediate(String::kEmptyHashField));
2492}
2493
2494
2495void MacroAssembler::AllocateAsciiString(Register result,
2496 Register length,
2497 Register scratch1,
2498 Register scratch2,
2499 Register scratch3,
2500 Label* gc_required) {
2501 // Calculate the number of bytes needed for the characters in the string while
2502 // observing object alignment.
2503 ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
2504 movl(scratch1, length);
2505 ASSERT(kCharSize == 1);
2506 addq(scratch1, Immediate(kObjectAlignmentMask));
2507 and_(scratch1, Immediate(~kObjectAlignmentMask));
2508
2509 // Allocate ascii string in new space.
2510 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2511 times_1,
2512 scratch1,
2513 result,
2514 scratch2,
2515 scratch3,
2516 gc_required,
2517 TAG_OBJECT);
2518
2519 // Set the map, length and hash field.
2520 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2521 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2522 movl(FieldOperand(result, String::kLengthOffset), length);
2523 movl(FieldOperand(result, String::kHashFieldOffset),
2524 Immediate(String::kEmptyHashField));
2525}
2526
2527
2528void MacroAssembler::AllocateConsString(Register result,
2529 Register scratch1,
2530 Register scratch2,
2531 Label* gc_required) {
2532  // Allocate the cons string object in new space.
2533 AllocateInNewSpace(ConsString::kSize,
2534 result,
2535 scratch1,
2536 scratch2,
2537 gc_required,
2538 TAG_OBJECT);
2539
2540 // Set the map. The other fields are left uninitialized.
2541 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2542 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2543}
2544
2545
2546void MacroAssembler::AllocateAsciiConsString(Register result,
2547 Register scratch1,
2548 Register scratch2,
2549 Label* gc_required) {
2550  // Allocate the cons string object in new space.
2551 AllocateInNewSpace(ConsString::kSize,
2552 result,
2553 scratch1,
2554 scratch2,
2555 gc_required,
2556 TAG_OBJECT);
2557
2558 // Set the map. The other fields are left uninitialized.
2559 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2560 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2561}
2562
2563
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002564void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2565 if (context_chain_length > 0) {
2566 // Move up the chain of contexts to the context containing the slot.
2567 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2568 // Load the function context (which is the incoming, outer context).
lrn@chromium.orgd5649e32010-01-19 13:36:12 +00002569 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002570 for (int i = 1; i < context_chain_length; i++) {
2571 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2572 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2573 }
2574 // The context may be an intermediate context, not a function context.
2575 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2576 } else { // context is the current function context.
2577 // The context may be an intermediate context, not a function context.
2578 movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2579 }
2580}
2581
sgjesse@chromium.orgb302e562010-02-03 11:26:59 +00002582int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2583 // On Windows stack slots are reserved by the caller for all arguments
2584 // including the ones passed in registers. On Linux 6 arguments are passed in
2585 // registers and the caller does not reserve stack slots for them.
2586 ASSERT(num_arguments >= 0);
2587#ifdef _WIN64
2588 static const int kArgumentsWithoutStackSlot = 0;
2589#else
2590 static const int kArgumentsWithoutStackSlot = 6;
2591#endif
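  // Illustrative example (added): a call with 8 arguments returns 8 on Windows
  // (slots are reserved for every argument, including those passed in
  // registers) and 2 elsewhere (only the arguments beyond the 6 register
  // arguments need stack slots).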
2592 return num_arguments > kArgumentsWithoutStackSlot ?
2593 num_arguments - kArgumentsWithoutStackSlot : 0;
2594}
2595
2596void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2597 int frame_alignment = OS::ActivationFrameAlignment();
2598 ASSERT(frame_alignment != 0);
2599 ASSERT(num_arguments >= 0);
2600 // Make stack end at alignment and allocate space for arguments and old rsp.
2601 movq(kScratchRegister, rsp);
2602 ASSERT(IsPowerOf2(frame_alignment));
2603 int argument_slots_on_stack =
2604 ArgumentStackSlotsForCFunctionCall(num_arguments);
2605 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2606 and_(rsp, Immediate(-frame_alignment));
2607 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
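  // Explanatory note (added): the original rsp is saved in the slot just above
  // the reserved argument slots; CallCFunction reloads it from the same offset
  // to restore the stack after the call.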
2608}
2609
2610
2611void MacroAssembler::CallCFunction(ExternalReference function,
2612 int num_arguments) {
2613 movq(rax, function);
2614 CallCFunction(rax, num_arguments);
2615}
2616
2617
2618void MacroAssembler::CallCFunction(Register function, int num_arguments) {
2619 call(function);
2620 ASSERT(OS::ActivationFrameAlignment() != 0);
2621 ASSERT(num_arguments >= 0);
2622 int argument_slots_on_stack =
2623 ArgumentStackSlotsForCFunctionCall(num_arguments);
2624 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2625}
2626
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002627
ager@chromium.org4af710e2009-09-15 12:20:11 +00002628CodePatcher::CodePatcher(byte* address, int size)
2629 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2630 // Create a new macro assembler pointing to the address of the code to patch.
2631  // The size is adjusted with kGap in order for the assembler to generate size
2632 // bytes of instructions without failing with buffer size constraints.
2633 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2634}
2635
2636
2637CodePatcher::~CodePatcher() {
2638 // Indicate that code has changed.
2639 CPU::FlushICache(address_, size_);
2640
2641 // Check that the code was patched as expected.
2642 ASSERT(masm_.pc_ == address_ + size_);
2643 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2644}
2645
kasperl@chromium.org71affb52009-05-26 05:44:31 +00002646} } // namespace v8::internal