// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, Immediate(~Page::kPageAlignmentMask));

  // Compute the number of the region covering addr; see the
  // Page::GetRegionNumberForAddress method for more details.
  and_(addr, Immediate(Page::kPageAlignmentMask));
  shrl(addr, Immediate(Page::kRegionSizeLog2));

  // Set the dirty mark for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}


// For the page containing |object|, mark the region covering [object+offset]
// dirty. |object| is the object being stored into and |value| is the object
// being stored. If |offset| is zero, then the |smi_index| register contains
// the array index into the elements array, represented as a smi; otherwise it
// can be used as a scratch register.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register smi_index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !smi_index.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, smi_index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register smi_index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);
  }

  // Test that the object address is not in the new space. We cannot
  // update page dirty marks for new space pages.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = smi_index;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.
    SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
    lea(dst, FieldOperand(object,
                          index.reg,
                          index.scale,
                          FixedArray::kHeaderSize));
  }
  RecordWriteHelper(object, dst, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
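    // An address inside the new space differs from new_space_start only in
    // the offset bits, so masking the difference with the new space mask
    // leaves zero exactly for new-space addresses.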
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if the stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
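  // Jump to then_label when result is zero and op is negative, i.e. when the
  // exact result would have been negative zero.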
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC problems; however,
  // msg is not guaranteed to be aligned properly. Instead, we pass an aligned
  // pointer that is a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  movq(rax, Immediate(num_arguments));
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));

  // Load the builtins object into the target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  movq(rdi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  movq(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    movq(target, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    movq(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmpq(target, Operand(rsp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  lea(target, FieldOperand(target, Code::kHeaderSize));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xor_(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (x == 0) {
    xor_(kScratchRegister, kScratchRegister);
    movq(dst, kScratchRegister);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

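// On x64 a smi keeps its 32-bit payload in the upper half of the word; the
// low 32 bits hold only the tag bit and padding, so kSmiShift works out to 32.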
static int kSmiShift = kSmiTagSize + kSmiShiftSize;

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
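  // The 32-bit payload of a smi lives in the upper half of the word, so it
  // can be read directly from the high 32 bits of the field.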
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


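// Multiplies a positive smi by a power of two and leaves the untagged result
// in dst, folding the untagging shift and the multiplication into one shift.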
void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
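  // Rotating left by one moves the sign bit into bit 0, so a single test of
  // the two low bits checks the sign bit and the smi tag bit at once.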
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}



Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  andl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
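  // Smi::kMinValue is the word with only the sign bit set, and rotating left
  // by one turns exactly that value into 1.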
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  cmpq(kScratchRegister, Immediate(1));
  return equal;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testq(src, Immediate(0x80000000));
  return zero;
}


void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible.
    if (dst.is(src1)) {
      addq(dst, src2);
    } else {
      movq(dst, src1);
      addq(dst, src2);
    }
    Assert(no_overflow, "Smi addition overflow");
  } else if (dst.is(src1)) {
    addq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    subq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result) {
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}

void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  Move(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    addq(dst, src);
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
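    // A smi's payload occupies the high 32 bits of the word, so the constant
    // can be added directly to that half of the field in memory.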
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
    Label result_ok;
    j(no_overflow, &result_ok);
    subq(dst, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&result_ok);
  } else {
    Move(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
  } else {
    // Subtract by adding the negative, to do it in two operations.
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
    } else {
      Move(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
    Label sub_success;
    j(no_overflow, &sub_success);
    addq(src, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&sub_success);
  } else {
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
      j(overflow, on_not_smi_result);
    } else {
      Move(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with the negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to the slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
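  // The low 31 bits of the untagged value are zero only for 0 and
  // Smi::kMinValue, the two dividends that need the slow case when the
  // divisor is negative.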
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    and_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    or_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    xor_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
                                                  Register src,
                                                  int shift_value,
                                                  Label* on_not_smi_result) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  Label result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // The shift amount is specified by the lower 5 bits, not six as for the shl
  // opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  Label result_ok;
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift amount is (rcx & 0x1f) + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift amount is 32 + (original rcx & 0x1f).
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // The operands must not both be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero then both are non-smis.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTagMask, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then the scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}

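// Converts a smi to an index for use in a memory operand: dst receives the
// untagged value shifted left by |shift|, and the returned SmiIndex uses
// scale times_1.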
SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}

SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
  ASSERT_EQ(0, kSmiTag);
  Condition smi = CheckSmi(src);
  j(smi, on_smi);
}


void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi);
}


void MacroAssembler::JumpIfNotPositiveSmi(Register src,
                                          Label* on_not_positive_smi) {
  Condition positive_smi = CheckPositiveSmi(src);
  j(NegateCondition(positive_smi), on_not_positive_smi);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals) {
  SmiCompare(src, constant);
  j(equal, on_equals);
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
                                      Label* on_not_both_smi) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
                                              Label* on_not_both_smi) {
  Condition both_smi = CheckBothPositiveSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


Leon Clarkee46be812010-01-19 14:06:41 +00001405void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
1406 Register second_object,
1407 Register scratch1,
1408 Register scratch2,
1409 Label* on_fail) {
1410 // Check that both objects are not smis.
1411 Condition either_smi = CheckEitherSmi(first_object, second_object);
1412 j(either_smi, on_fail);
1413
1414 // Load instance type for both strings.
1415 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
1416 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
1417 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
1418 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
1419
1420 // Check that both are flat ascii strings.
1421 ASSERT(kNotStringTag != 0);
1422 const int kFlatAsciiStringMask =
1423 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
Leon Clarked91b9f72010-01-27 17:25:45 +00001424 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
Leon Clarkee46be812010-01-19 14:06:41 +00001425
1426 andl(scratch1, Immediate(kFlatAsciiStringMask));
1427 andl(scratch2, Immediate(kFlatAsciiStringMask));
1428 // Interleave the bits to check both scratch1 and scratch2 in one test.
1429 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1430 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1431 cmpl(scratch1,
Leon Clarked91b9f72010-01-27 17:25:45 +00001432 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
Leon Clarkee46be812010-01-19 14:06:41 +00001433 j(not_equal, on_fail);
1434}
1435
1436
Steve Block6ded16b2010-05-10 14:33:55 +01001437void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1438 Register instance_type,
1439 Register scratch,
1440 Label* failure) {
1441 if (!scratch.is(instance_type)) {
1442 movl(scratch, instance_type);
1443 }
1444
1445 const int kFlatAsciiStringMask =
1446 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1447
1448 andl(scratch, Immediate(kFlatAsciiStringMask));
1449 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1450 j(not_equal, failure);
1451}
1452
1453
1454void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1455 Register first_object_instance_type,
1456 Register second_object_instance_type,
1457 Register scratch1,
1458 Register scratch2,
1459 Label* on_fail) {
1460 // Load instance type for both strings.
1461 movq(scratch1, first_object_instance_type);
1462 movq(scratch2, second_object_instance_type);
1463
1464 // Check that both are flat ascii strings.
1465 ASSERT(kNotStringTag != 0);
1466 const int kFlatAsciiStringMask =
1467 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1468 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1469
1470 andl(scratch1, Immediate(kFlatAsciiStringMask));
1471 andl(scratch2, Immediate(kFlatAsciiStringMask));
1472 // Interleave the bits to check both scratch1 and scratch2 in one test.
1473 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1474 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1475 cmpl(scratch1,
1476 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1477 j(not_equal, on_fail);
1478}
1479
1480
Steve Blocka7e24c12009-10-30 11:49:00 +00001481void MacroAssembler::Move(Register dst, Handle<Object> source) {
1482 ASSERT(!source->IsFailure());
1483 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001484 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001485 } else {
1486 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1487 }
1488}
1489
1490
1491void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001492 ASSERT(!source->IsFailure());
Steve Blocka7e24c12009-10-30 11:49:00 +00001493 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001494 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001495 } else {
1496 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1497 movq(dst, kScratchRegister);
1498 }
1499}
1500
1501
1502void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001503 if (source->IsSmi()) {
1504 SmiCompare(dst, Smi::cast(*source));
1505 } else {
1506 Move(kScratchRegister, source);
1507 cmpq(dst, kScratchRegister);
1508 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001509}
1510
1511
1512void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1513 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001514 SmiCompare(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001515 } else {
1516 ASSERT(source->IsHeapObject());
1517 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1518 cmpq(dst, kScratchRegister);
1519 }
1520}
1521
1522
1523void MacroAssembler::Push(Handle<Object> source) {
1524 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001525 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001526 } else {
1527 ASSERT(source->IsHeapObject());
1528 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1529 push(kScratchRegister);
1530 }
1531}
1532
1533
1534void MacroAssembler::Push(Smi* source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001535 intptr_t smi = reinterpret_cast<intptr_t>(source);
1536 if (is_int32(smi)) {
1537 push(Immediate(static_cast<int32_t>(smi)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001538 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001539 Set(kScratchRegister, smi);
1540 push(kScratchRegister);
1541 }
1542}
1543
1544
Leon Clarkee46be812010-01-19 14:06:41 +00001545void MacroAssembler::Drop(int stack_elements) {
1546 if (stack_elements > 0) {
1547 addq(rsp, Immediate(stack_elements * kPointerSize));
1548 }
1549}
1550
1551
Steve Block3ce2e202009-11-05 08:53:23 +00001552void MacroAssembler::Test(const Operand& src, Smi* source) {
Leon Clarkef7060e22010-06-03 12:02:55 +01001553 testl(Operand(src, kIntSize), Immediate(source->value()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001554}
1555
1556
1557void MacroAssembler::Jump(ExternalReference ext) {
1558 movq(kScratchRegister, ext);
1559 jmp(kScratchRegister);
1560}
1561
1562
1563void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1564 movq(kScratchRegister, destination, rmode);
1565 jmp(kScratchRegister);
1566}
1567
1568
1569void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
Steve Block3ce2e202009-11-05 08:53:23 +00001570 // TODO(X64): Inline this
1571 jmp(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001572}
1573
1574
1575void MacroAssembler::Call(ExternalReference ext) {
1576 movq(kScratchRegister, ext);
1577 call(kScratchRegister);
1578}
1579
1580
1581void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1582 movq(kScratchRegister, destination, rmode);
1583 call(kScratchRegister);
1584}
1585
1586
1587void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1588 ASSERT(RelocInfo::IsCodeTarget(rmode));
1589 WriteRecordedPositions();
Steve Block3ce2e202009-11-05 08:53:23 +00001590 call(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001591}
1592
1593
1594void MacroAssembler::PushTryHandler(CodeLocation try_location,
1595 HandlerType type) {
1596 // Adjust this code if the stack handler size or layout changes.
1597 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1598
1599 // The pc (return address) is already on TOS. This code pushes state,
1600 // frame pointer and current handler. Check that they are expected
1601 // next on the stack, in that order.
1602 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1603 StackHandlerConstants::kPCOffset - kPointerSize);
1604 ASSERT_EQ(StackHandlerConstants::kFPOffset,
1605 StackHandlerConstants::kStateOffset - kPointerSize);
1606 ASSERT_EQ(StackHandlerConstants::kNextOffset,
1607 StackHandlerConstants::kFPOffset - kPointerSize);
1608
1609 if (try_location == IN_JAVASCRIPT) {
1610 if (type == TRY_CATCH_HANDLER) {
1611 push(Immediate(StackHandler::TRY_CATCH));
1612 } else {
1613 push(Immediate(StackHandler::TRY_FINALLY));
1614 }
1615 push(rbp);
1616 } else {
1617 ASSERT(try_location == IN_JS_ENTRY);
1618 // The frame pointer does not point to a JS frame so we save NULL
1619 // for rbp. We expect the code throwing an exception to check rbp
1620 // before dereferencing it to restore the context.
1621 push(Immediate(StackHandler::ENTRY));
1622 push(Immediate(0)); // NULL frame pointer.
1623 }
1624 // Save the current handler.
1625 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1626 push(Operand(kScratchRegister, 0));
1627 // Link this handler.
1628 movq(Operand(kScratchRegister, 0), rsp);
1629}
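// For reference, the handler pushed above is laid out as follows (rsp points
// at the lowest slot), which is what the offset asserts at the top of
// PushTryHandler encode:
//
//   rsp + 0 * kPointerSize : next handler     (kNextOffset)
//   rsp + 1 * kPointerSize : frame pointer/0  (kFPOffset)
//   rsp + 2 * kPointerSize : handler state    (kStateOffset)
//   rsp + 3 * kPointerSize : return address   (kPCOffset)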
1630
1631
Leon Clarkee46be812010-01-19 14:06:41 +00001632void MacroAssembler::PopTryHandler() {
1633 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1634 // Unlink this handler.
1635 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1636 pop(Operand(kScratchRegister, 0));
1637 // Remove the remaining fields.
1638 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1639}
1640
1641
Steve Blocka7e24c12009-10-30 11:49:00 +00001642void MacroAssembler::Ret() {
1643 ret(0);
1644}
1645
1646
1647void MacroAssembler::FCmp() {
Steve Block3ce2e202009-11-05 08:53:23 +00001648 fucomip();
1649 ffree(0);
1650 fincstp();
Steve Blocka7e24c12009-10-30 11:49:00 +00001651}
1652
1653
1654void MacroAssembler::CmpObjectType(Register heap_object,
1655 InstanceType type,
1656 Register map) {
1657 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1658 CmpInstanceType(map, type);
1659}
1660
1661
1662void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1663 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1664 Immediate(static_cast<int8_t>(type)));
1665}
1666
1667
Andrei Popescu31002712010-02-23 13:46:05 +00001668void MacroAssembler::CheckMap(Register obj,
1669 Handle<Map> map,
1670 Label* fail,
1671 bool is_heap_object) {
1672 if (!is_heap_object) {
1673 JumpIfSmi(obj, fail);
1674 }
1675 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1676 j(not_equal, fail);
1677}
1678
1679
Leon Clarkef7060e22010-06-03 12:02:55 +01001680void MacroAssembler::AbortIfNotNumber(Register object) {
Andrei Popescu402d9372010-02-26 13:31:12 +00001681 Label ok;
1682 Condition is_smi = CheckSmi(object);
1683 j(is_smi, &ok);
1684 Cmp(FieldOperand(object, HeapObject::kMapOffset),
1685 Factory::heap_number_map());
Leon Clarkef7060e22010-06-03 12:02:55 +01001686 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +00001687 bind(&ok);
1688}
1689
1690
Leon Clarkef7060e22010-06-03 12:02:55 +01001691void MacroAssembler::AbortIfNotSmi(Register object) {
1693 Condition is_smi = CheckSmi(object);
Leon Clarkef7060e22010-06-03 12:02:55 +01001694 Assert(is_smi, "Operand not a smi");
Steve Block6ded16b2010-05-10 14:33:55 +01001695}
1696
1697
Leon Clarked91b9f72010-01-27 17:25:45 +00001698Condition MacroAssembler::IsObjectStringType(Register heap_object,
1699 Register map,
1700 Register instance_type) {
1701 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00001702 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00001703 ASSERT(kNotStringTag != 0);
1704 testb(instance_type, Immediate(kIsNotStringMask));
1705 return zero;
1706}
1707
1708
Steve Blocka7e24c12009-10-30 11:49:00 +00001709void MacroAssembler::TryGetFunctionPrototype(Register function,
1710 Register result,
1711 Label* miss) {
1712 // Check that the receiver isn't a smi.
1713 JumpIfSmi(function, miss);
1715
1716 // Check that the function really is a function.
1717 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1718 j(not_equal, miss);
1719
1720 // Make sure that the function has an instance prototype.
1721 Label non_instance;
1722 testb(FieldOperand(result, Map::kBitFieldOffset),
1723 Immediate(1 << Map::kHasNonInstancePrototype));
1724 j(not_zero, &non_instance);
1725
1726 // Get the prototype or initial map from the function.
1727 movq(result,
1728 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1729
1730 // If the prototype or initial map is the hole, don't return it and
1731 // simply miss the cache instead. This will allow us to allocate a
1732 // prototype object on-demand in the runtime system.
1733 CompareRoot(result, Heap::kTheHoleValueRootIndex);
1734 j(equal, miss);
1735
1736 // If the function does not have an initial map, we're done.
1737 Label done;
1738 CmpObjectType(result, MAP_TYPE, kScratchRegister);
1739 j(not_equal, &done);
1740
1741 // Get the prototype from the initial map.
1742 movq(result, FieldOperand(result, Map::kPrototypeOffset));
1743 jmp(&done);
1744
1745 // Non-instance prototype: Fetch prototype from constructor field
1746 // in initial map.
1747 bind(&non_instance);
1748 movq(result, FieldOperand(result, Map::kConstructorOffset));
1749
1750 // All done.
1751 bind(&done);
1752}
1753
1754
1755void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1756 if (FLAG_native_code_counters && counter->Enabled()) {
1757 movq(kScratchRegister, ExternalReference(counter));
1758 movl(Operand(kScratchRegister, 0), Immediate(value));
1759 }
1760}
1761
1762
1763void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1764 ASSERT(value > 0);
1765 if (FLAG_native_code_counters && counter->Enabled()) {
1766 movq(kScratchRegister, ExternalReference(counter));
1767 Operand operand(kScratchRegister, 0);
1768 if (value == 1) {
1769 incl(operand);
1770 } else {
1771 addl(operand, Immediate(value));
1772 }
1773 }
1774}
1775
1776
1777void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1778 ASSERT(value > 0);
1779 if (FLAG_native_code_counters && counter->Enabled()) {
1780 movq(kScratchRegister, ExternalReference(counter));
1781 Operand operand(kScratchRegister, 0);
1782 if (value == 1) {
1783 decl(operand);
1784 } else {
1785 subl(operand, Immediate(value));
1786 }
1787 }
1788}
1789
Steve Blocka7e24c12009-10-30 11:49:00 +00001790#ifdef ENABLE_DEBUGGER_SUPPORT
1791
1792void MacroAssembler::PushRegistersFromMemory(RegList regs) {
1793 ASSERT((regs & ~kJSCallerSaved) == 0);
1794 // Push the content of the memory location to the stack.
1795 for (int i = 0; i < kNumJSCallerSaved; i++) {
1796 int r = JSCallerSavedCode(i);
1797 if ((regs & (1 << r)) != 0) {
1798 ExternalReference reg_addr =
1799 ExternalReference(Debug_Address::Register(i));
1800 movq(kScratchRegister, reg_addr);
1801 push(Operand(kScratchRegister, 0));
1802 }
1803 }
1804}
1805
Steve Block3ce2e202009-11-05 08:53:23 +00001806
Steve Blocka7e24c12009-10-30 11:49:00 +00001807void MacroAssembler::SaveRegistersToMemory(RegList regs) {
1808 ASSERT((regs & ~kJSCallerSaved) == 0);
1809 // Copy the content of registers to memory location.
1810 for (int i = 0; i < kNumJSCallerSaved; i++) {
1811 int r = JSCallerSavedCode(i);
1812 if ((regs & (1 << r)) != 0) {
1813 Register reg = { r };
1814 ExternalReference reg_addr =
1815 ExternalReference(Debug_Address::Register(i));
1816 movq(kScratchRegister, reg_addr);
1817 movq(Operand(kScratchRegister, 0), reg);
1818 }
1819 }
1820}
1821
1822
1823void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
1824 ASSERT((regs & ~kJSCallerSaved) == 0);
1825 // Copy the content of memory location to registers.
1826 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1827 int r = JSCallerSavedCode(i);
1828 if ((regs & (1 << r)) != 0) {
1829 Register reg = { r };
1830 ExternalReference reg_addr =
1831 ExternalReference(Debug_Address::Register(i));
1832 movq(kScratchRegister, reg_addr);
1833 movq(reg, Operand(kScratchRegister, 0));
1834 }
1835 }
1836}
1837
1838
1839void MacroAssembler::PopRegistersToMemory(RegList regs) {
1840 ASSERT((regs & ~kJSCallerSaved) == 0);
1841 // Pop the content from the stack to the memory location.
1842 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1843 int r = JSCallerSavedCode(i);
1844 if ((regs & (1 << r)) != 0) {
1845 ExternalReference reg_addr =
1846 ExternalReference(Debug_Address::Register(i));
1847 movq(kScratchRegister, reg_addr);
1848 pop(Operand(kScratchRegister, 0));
1849 }
1850 }
1851}
1852
1853
1854void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
1855 Register scratch,
1856 RegList regs) {
1857 ASSERT(!scratch.is(kScratchRegister));
1858 ASSERT(!base.is(kScratchRegister));
1859 ASSERT(!base.is(scratch));
1860 ASSERT((regs & ~kJSCallerSaved) == 0);
1861 // Copy the content of the stack to the memory location and adjust base.
1862 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1863 int r = JSCallerSavedCode(i);
1864 if ((regs & (1 << r)) != 0) {
1865 movq(scratch, Operand(base, 0));
1866 ExternalReference reg_addr =
1867 ExternalReference(Debug_Address::Register(i));
1868 movq(kScratchRegister, reg_addr);
1869 movq(Operand(kScratchRegister, 0), scratch);
1870 lea(base, Operand(base, kPointerSize));
1871 }
1872 }
1873}
1874
Andrei Popescu402d9372010-02-26 13:31:12 +00001875void MacroAssembler::DebugBreak() {
1876 ASSERT(allow_stub_calls());
1877 xor_(rax, rax); // no arguments
1878 movq(rbx, ExternalReference(Runtime::kDebugBreak));
1879 CEntryStub ces(1);
1880 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Steve Blocka7e24c12009-10-30 11:49:00 +00001881}
Andrei Popescu402d9372010-02-26 13:31:12 +00001882#endif // ENABLE_DEBUGGER_SUPPORT
Steve Blocka7e24c12009-10-30 11:49:00 +00001883
1884
1885void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1886 const ParameterCount& actual,
1887 Handle<Code> code_constant,
1888 Register code_register,
1889 Label* done,
1890 InvokeFlag flag) {
1891 bool definitely_matches = false;
1892 Label invoke;
1893 if (expected.is_immediate()) {
1894 ASSERT(actual.is_immediate());
1895 if (expected.immediate() == actual.immediate()) {
1896 definitely_matches = true;
1897 } else {
1898 movq(rax, Immediate(actual.immediate()));
1899 if (expected.immediate() ==
Steve Block3ce2e202009-11-05 08:53:23 +00001900 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001901 // Don't worry about adapting arguments for built-ins that
1902 // don't want that done. Skip adaption code by making it look
1903 // like we have a match between expected and actual number of
1904 // arguments.
1905 definitely_matches = true;
1906 } else {
1907 movq(rbx, Immediate(expected.immediate()));
1908 }
1909 }
1910 } else {
1911 if (actual.is_immediate()) {
1912 // Expected is in register, actual is immediate. This is the
1913 // case when we invoke function values without going through the
1914 // IC mechanism.
1915 cmpq(expected.reg(), Immediate(actual.immediate()));
1916 j(equal, &invoke);
1917 ASSERT(expected.reg().is(rbx));
1918 movq(rax, Immediate(actual.immediate()));
1919 } else if (!expected.reg().is(actual.reg())) {
1920 // Both expected and actual are in (different) registers. This
1921 // is the case when we invoke functions using call and apply.
1922 cmpq(expected.reg(), actual.reg());
1923 j(equal, &invoke);
1924 ASSERT(actual.reg().is(rax));
1925 ASSERT(expected.reg().is(rbx));
1926 }
1927 }
1928
1929 if (!definitely_matches) {
1930 Handle<Code> adaptor =
1931 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
1932 if (!code_constant.is_null()) {
1933 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
1934 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1935 } else if (!code_register.is(rdx)) {
1936 movq(rdx, code_register);
1937 }
1938
1939 if (flag == CALL_FUNCTION) {
1940 Call(adaptor, RelocInfo::CODE_TARGET);
1941 jmp(done);
1942 } else {
1943 Jump(adaptor, RelocInfo::CODE_TARGET);
1944 }
1945 bind(&invoke);
1946 }
1947}
1948
1949
1950void MacroAssembler::InvokeCode(Register code,
1951 const ParameterCount& expected,
1952 const ParameterCount& actual,
1953 InvokeFlag flag) {
1954 Label done;
1955 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1956 if (flag == CALL_FUNCTION) {
1957 call(code);
1958 } else {
1959 ASSERT(flag == JUMP_FUNCTION);
1960 jmp(code);
1961 }
1962 bind(&done);
1963}
1964
1965
1966void MacroAssembler::InvokeCode(Handle<Code> code,
1967 const ParameterCount& expected,
1968 const ParameterCount& actual,
1969 RelocInfo::Mode rmode,
1970 InvokeFlag flag) {
1971 Label done;
1972 Register dummy = rax;
1973 InvokePrologue(expected, actual, code, dummy, &done, flag);
1974 if (flag == CALL_FUNCTION) {
1975 Call(code, rmode);
1976 } else {
1977 ASSERT(flag == JUMP_FUNCTION);
1978 Jump(code, rmode);
1979 }
1980 bind(&done);
1981}
1982
1983
1984void MacroAssembler::InvokeFunction(Register function,
1985 const ParameterCount& actual,
1986 InvokeFlag flag) {
1987 ASSERT(function.is(rdi));
1988 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1989 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
1990 movsxlq(rbx,
1991 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
1992 movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
1993 // Advances rdx to the end of the Code object header, to the start of
1994 // the executable code.
1995 lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
1996
1997 ParameterCount expected(rbx);
1998 InvokeCode(rdx, expected, actual, flag);
1999}
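// Illustrative call-site sketch (assumed, not from this file): with the
// callee already in rdi and its arguments pushed, a stub could emit
//
//   ParameterCount actual(2);  // two arguments pushed by the caller
//   masm->InvokeFunction(rdi, actual, CALL_FUNCTION);
//
// The callee must be in rdi, as asserted above; rdx, rbx and rsi are
// clobbered while the expected argument count and the context are loaded.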
2000
2001
Andrei Popescu402d9372010-02-26 13:31:12 +00002002void MacroAssembler::InvokeFunction(JSFunction* function,
2003 const ParameterCount& actual,
2004 InvokeFlag flag) {
2005 ASSERT(function->is_compiled());
2006 // Get the function and setup the context.
2007 Move(rdi, Handle<JSFunction>(function));
2008 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2009
2010 // Invoke the cached code.
2011 Handle<Code> code(function->code());
2012 ParameterCount expected(function->shared()->formal_parameter_count());
2013 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
2014}
2015
2016
Steve Blocka7e24c12009-10-30 11:49:00 +00002017void MacroAssembler::EnterFrame(StackFrame::Type type) {
2018 push(rbp);
2019 movq(rbp, rsp);
2020 push(rsi); // Context.
Steve Block3ce2e202009-11-05 08:53:23 +00002021 Push(Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002022 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2023 push(kScratchRegister);
2024 if (FLAG_debug_code) {
2025 movq(kScratchRegister,
2026 Factory::undefined_value(),
2027 RelocInfo::EMBEDDED_OBJECT);
2028 cmpq(Operand(rsp, 0), kScratchRegister);
2029 Check(not_equal, "code object not properly patched");
2030 }
2031}
2032
2033
2034void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2035 if (FLAG_debug_code) {
Steve Block3ce2e202009-11-05 08:53:23 +00002036 Move(kScratchRegister, Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002037 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2038 Check(equal, "stack frame types must match");
2039 }
2040 movq(rsp, rbp);
2041 pop(rbp);
2042}
2043
2044
Steve Blockd0582a62009-12-15 09:54:21 +00002045void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002046 // Setup the frame structure on the stack.
2047 // All constants are relative to the frame pointer of the exit frame.
2048 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2049 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2050 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2051 push(rbp);
2052 movq(rbp, rsp);
2053
2054 // Reserve room for entry stack pointer and push the debug marker.
Steve Block3ce2e202009-11-05 08:53:23 +00002055 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +00002056 push(Immediate(0)); // Saved entry sp, patched before call.
2057 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2058 push(kScratchRegister); // Accessed from EditFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +00002059
2060 // Save the frame pointer and the context in top.
2061 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2062 ExternalReference context_address(Top::k_context_address);
2063 movq(r14, rax); // Backup rax before we use it.
2064
2065 movq(rax, rbp);
2066 store_rax(c_entry_fp_address);
2067 movq(rax, rsi);
2068 store_rax(context_address);
2069
2070 // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
2071 // so it must be retained across the C-call.
2072 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
2073 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
2074
2075#ifdef ENABLE_DEBUGGER_SUPPORT
2076 // Save the state of all registers to the stack from the memory
2077 // location. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00002078 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002079 // TODO(1243899): This should be symmetric to
2080 // CopyRegistersFromStackToMemory() but it isn't! rsp is assumed
2081 // correct here, but computed for the other call. Very error
2082 // prone! FIX THIS. Actually there are deeper problems with
2083 // register saving than this asymmetry (see the bug report
2084 // associated with this issue).
2085 PushRegistersFromMemory(kJSCallerSaved);
2086 }
2087#endif
2088
2089#ifdef _WIN64
2090 // Reserve space on stack for result and argument structures, if necessary.
2091 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
2092 // Reserve space for the Arguments object. The Windows 64-bit ABI
2093 // requires us to pass this structure as a pointer to its location on
2094 // the stack. The structure contains 2 values.
2095 int argument_stack_space = 2 * kPointerSize;
2096 // We also need backing space for 4 parameters, even though
2097 // we only pass one or two parameters, and they are passed in registers.
2098 int argument_mirror_space = 4 * kPointerSize;
2099 int total_stack_space =
2100 argument_mirror_space + argument_stack_space + result_stack_space;
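  // Worked example (illustrative): with result_size == 2 this reserves
  // 16 (result) + 16 (Arguments) + 32 (register mirror) = 64 bytes; with
  // result_size <= 1 it is 0 + 16 + 32 = 48 bytes, before the alignment
  // adjustment below.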
2101 subq(rsp, Immediate(total_stack_space));
2102#endif
2103
2104 // Get the required frame alignment for the OS.
2105 static const int kFrameAlignment = OS::ActivationFrameAlignment();
2106 if (kFrameAlignment > 0) {
2107 ASSERT(IsPowerOf2(kFrameAlignment));
2108 movq(kScratchRegister, Immediate(-kFrameAlignment));
2109 and_(rsp, kScratchRegister);
2110 }
2111
2112 // Patch the saved entry sp.
2113 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2114}
2115
2116
Steve Blockd0582a62009-12-15 09:54:21 +00002117void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002118 // Registers:
2119 // r15 : argv
2120#ifdef ENABLE_DEBUGGER_SUPPORT
2121 // Restore the memory copy of the registers by digging them out from
2122 // the stack. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00002123 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002124 // It's okay to clobber register rbx below because we don't need
2125 // the function pointer after this.
2126 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
Steve Blockd0582a62009-12-15 09:54:21 +00002127 int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002128 lea(rbx, Operand(rbp, kOffset));
2129 CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
2130 }
2131#endif
2132
2133 // Get the return address from the stack and restore the frame pointer.
2134 movq(rcx, Operand(rbp, 1 * kPointerSize));
2135 movq(rbp, Operand(rbp, 0 * kPointerSize));
2136
Steve Blocka7e24c12009-10-30 11:49:00 +00002137 // Pop everything up to and including the arguments and the receiver
2138 // from the caller stack.
2139 lea(rsp, Operand(r15, 1 * kPointerSize));
2140
2141 // Restore current context from top and clear it in debug mode.
2142 ExternalReference context_address(Top::k_context_address);
2143 movq(kScratchRegister, context_address);
2144 movq(rsi, Operand(kScratchRegister, 0));
2145#ifdef DEBUG
2146 movq(Operand(kScratchRegister, 0), Immediate(0));
2147#endif
2148
2149 // Push the return address to get ready to return.
2150 push(rcx);
2151
2152 // Clear the top frame.
2153 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2154 movq(kScratchRegister, c_entry_fp_address);
2155 movq(Operand(kScratchRegister, 0), Immediate(0));
2156}
2157
2158
Steve Block3ce2e202009-11-05 08:53:23 +00002159Register MacroAssembler::CheckMaps(JSObject* object,
2160 Register object_reg,
2161 JSObject* holder,
2162 Register holder_reg,
Steve Blocka7e24c12009-10-30 11:49:00 +00002163 Register scratch,
Steve Block6ded16b2010-05-10 14:33:55 +01002164 int save_at_depth,
Steve Blocka7e24c12009-10-30 11:49:00 +00002165 Label* miss) {
2166 // Make sure there's no overlap between scratch and the other
2167 // registers.
2168 ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));
2169
2170 // Keep track of the current object in register reg. On the first
2171 // iteration, reg is an alias for object_reg, on later iterations,
2172 // it is an alias for holder_reg.
2173 Register reg = object_reg;
Steve Block6ded16b2010-05-10 14:33:55 +01002174 int depth = 0;
2175
2176 if (save_at_depth == depth) {
2177 movq(Operand(rsp, kPointerSize), object_reg);
2178 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002179
2180 // Check the maps in the prototype chain.
2181 // Traverse the prototype chain from the object and do map checks.
2182 while (object != holder) {
2183 depth++;
2184
2185 // Only global objects and objects that do not require access
2186 // checks are allowed in stubs.
2187 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2188
2189 JSObject* prototype = JSObject::cast(object->GetPrototype());
2190 if (Heap::InNewSpace(prototype)) {
2191 // Get the map of the current object.
2192 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2193 Cmp(scratch, Handle<Map>(object->map()));
2194 // Branch on the result of the map check.
2195 j(not_equal, miss);
2196 // Check access rights to the global object. This has to happen
2197 // after the map check so that we know that the object is
2198 // actually a global object.
2199 if (object->IsJSGlobalProxy()) {
2200 CheckAccessGlobalProxy(reg, scratch, miss);
2201
2202 // Restore scratch register to be the map of the object.
2203 // We load the prototype from the map in the scratch register.
2204 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2205 }
2206 // The prototype is in new space; we cannot store a reference
2207 // to it in the code. Load it from the map.
2208 reg = holder_reg; // from now the object is in holder_reg
2209 movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));
2210
2211 } else {
2212 // Check the map of the current object.
2213 Cmp(FieldOperand(reg, HeapObject::kMapOffset),
2214 Handle<Map>(object->map()));
2215 // Branch on the result of the map check.
2216 j(not_equal, miss);
2217 // Check access rights to the global object. This has to happen
2218 // after the map check so that we know that the object is
2219 // actually a global object.
2220 if (object->IsJSGlobalProxy()) {
2221 CheckAccessGlobalProxy(reg, scratch, miss);
2222 }
2223 // The prototype is in old space; load it directly.
2224 reg = holder_reg; // from now the object is in holder_reg
2225 Move(reg, Handle<JSObject>(prototype));
2226 }
2227
Steve Block6ded16b2010-05-10 14:33:55 +01002228 if (save_at_depth == depth) {
2229 movq(Operand(rsp, kPointerSize), reg);
2230 }
2231
Steve Blocka7e24c12009-10-30 11:49:00 +00002232 // Go to the next object in the prototype chain.
2233 object = prototype;
2234 }
2235
2236 // Check the holder map.
Steve Block3ce2e202009-11-05 08:53:23 +00002237 Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002238 j(not_equal, miss);
2239
2240 // Log the check depth.
Steve Block6ded16b2010-05-10 14:33:55 +01002241 LOG(IntEvent("check-maps-depth", depth + 1));
Steve Blocka7e24c12009-10-30 11:49:00 +00002242
2243 // Perform security check for access to the global object and return
2244 // the holder register.
2245 ASSERT(object == holder);
2246 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2247 if (object->IsJSGlobalProxy()) {
2248 CheckAccessGlobalProxy(reg, scratch, miss);
2249 }
2250 return reg;
2251}
2252
2253
Steve Blocka7e24c12009-10-30 11:49:00 +00002254void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2255 Register scratch,
2256 Label* miss) {
2257 Label same_contexts;
2258
2259 ASSERT(!holder_reg.is(scratch));
2260 ASSERT(!scratch.is(kScratchRegister));
2261 // Load current lexical context from the stack frame.
2262 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2263
2264 // When generating debug code, make sure the lexical context is set.
2265 if (FLAG_debug_code) {
2266 cmpq(scratch, Immediate(0));
2267 Check(not_equal, "we should not have an empty lexical context");
2268 }
2269 // Load the global context of the current context.
2270 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2271 movq(scratch, FieldOperand(scratch, offset));
2272 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2273
2274 // Check the context is a global context.
2275 if (FLAG_debug_code) {
2276 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2277 Factory::global_context_map());
2278 Check(equal, "JSGlobalObject::global_context should be a global context.");
2279 }
2280
2281 // Check if both contexts are the same.
2282 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2283 j(equal, &same_contexts);
2284
2285 // Compare security tokens.
2286 // Check that the security token in the calling global object is
2287 // compatible with the security token in the receiving global
2288 // object.
2289
2290 // Check the context is a global context.
2291 if (FLAG_debug_code) {
2292 // Preserve original value of holder_reg.
2293 push(holder_reg);
2294 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2295 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2296 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2297
2298 // Read the first word and compare to global_context_map(),
2299 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2300 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2301 Check(equal, "JSGlobalObject::global_context should be a global context.");
2302 pop(holder_reg);
2303 }
2304
2305 movq(kScratchRegister,
2306 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00002307 int token_offset =
2308 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002309 movq(scratch, FieldOperand(scratch, token_offset));
2310 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2311 j(not_equal, miss);
2312
2313 bind(&same_contexts);
2314}
2315
2316
2317void MacroAssembler::LoadAllocationTopHelper(Register result,
2318 Register result_end,
2319 Register scratch,
2320 AllocationFlags flags) {
2321 ExternalReference new_space_allocation_top =
2322 ExternalReference::new_space_allocation_top_address();
2323
2324 // Just return if allocation top is already known.
2325 if ((flags & RESULT_CONTAINS_TOP) != 0) {
2326 // No use of scratch if allocation top is provided.
Steve Block6ded16b2010-05-10 14:33:55 +01002327 ASSERT(!scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00002328#ifdef DEBUG
2329 // Assert that result actually contains top on entry.
2330 movq(kScratchRegister, new_space_allocation_top);
2331 cmpq(result, Operand(kScratchRegister, 0));
2332 Check(equal, "Unexpected allocation top");
2333#endif
2334 return;
2335 }
2336
Steve Block6ded16b2010-05-10 14:33:55 +01002337 // Move address of new object to result. Use scratch register if available,
2338 // and keep address in scratch until call to UpdateAllocationTopHelper.
2339 if (scratch.is_valid()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002340 ASSERT(!scratch.is(result_end));
2341 movq(scratch, new_space_allocation_top);
2342 movq(result, Operand(scratch, 0));
Steve Block6ded16b2010-05-10 14:33:55 +01002343 } else if (result.is(rax)) {
2344 load_rax(new_space_allocation_top);
2345 } else {
2346 movq(kScratchRegister, new_space_allocation_top);
2347 movq(result, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002348 }
2349}
2350
2351
2352void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2353 Register scratch) {
Steve Blockd0582a62009-12-15 09:54:21 +00002354 if (FLAG_debug_code) {
2355 testq(result_end, Immediate(kObjectAlignmentMask));
2356 Check(zero, "Unaligned allocation in new space");
2357 }
2358
Steve Blocka7e24c12009-10-30 11:49:00 +00002359 ExternalReference new_space_allocation_top =
2360 ExternalReference::new_space_allocation_top_address();
2361
2362 // Update new top.
2363 if (result_end.is(rax)) {
2364 // rax can be stored directly to a memory location.
2365 store_rax(new_space_allocation_top);
2366 } else {
2367 // Register required - use scratch provided if available.
Steve Block6ded16b2010-05-10 14:33:55 +01002368 if (scratch.is_valid()) {
2369 movq(Operand(scratch, 0), result_end);
2370 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +00002371 movq(kScratchRegister, new_space_allocation_top);
2372 movq(Operand(kScratchRegister, 0), result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00002373 }
2374 }
2375}
2376
2377
2378void MacroAssembler::AllocateInNewSpace(int object_size,
2379 Register result,
2380 Register result_end,
2381 Register scratch,
2382 Label* gc_required,
2383 AllocationFlags flags) {
2384 ASSERT(!result.is(result_end));
2385
2386 // Load address of new object into result.
2387 LoadAllocationTopHelper(result, result_end, scratch, flags);
2388
2389 // Calculate new top and bail out if new space is exhausted.
2390 ExternalReference new_space_allocation_limit =
2391 ExternalReference::new_space_allocation_limit_address();
Steve Block6ded16b2010-05-10 14:33:55 +01002392
2393 Register top_reg = result_end.is_valid() ? result_end : result;
2394
2395 if (top_reg.is(result)) {
2396 addq(top_reg, Immediate(object_size));
2397 } else {
2398 lea(top_reg, Operand(result, object_size));
2399 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002400 movq(kScratchRegister, new_space_allocation_limit);
Steve Block6ded16b2010-05-10 14:33:55 +01002401 cmpq(top_reg, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002402 j(above, gc_required);
2403
2404 // Update allocation top.
Steve Block6ded16b2010-05-10 14:33:55 +01002405 UpdateAllocationTopHelper(top_reg, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00002406
Steve Block6ded16b2010-05-10 14:33:55 +01002407 if (top_reg.is(result)) {
2408 if ((flags & TAG_OBJECT) != 0) {
2409 subq(result, Immediate(object_size - kHeapObjectTag));
2410 } else {
2411 subq(result, Immediate(object_size));
2412 }
2413 } else if ((flags & TAG_OBJECT) != 0) {
2414 // Tag the result if requested.
Steve Blocka7e24c12009-10-30 11:49:00 +00002415 addq(result, Immediate(kHeapObjectTag));
2416 }
2417}
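// Illustrative usage sketch (assumed, not from this file): allocating a
// fixed-size, tagged object with a bailout to the runtime when new space
// is exhausted:
//
//   Label gc_required;
//   masm->AllocateInNewSpace(HeapNumber::kSize, rax, rbx, no_reg,
//                            &gc_required, TAG_OBJECT);
//   // ... store the map and initialize the fields of the object in rax ...
//   masm->bind(&gc_required);
//   // ... call into the runtime so a GC can be triggered ...
//
// AllocateHeapNumber further down is the in-tree instance of this pattern.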
2418
2419
2420void MacroAssembler::AllocateInNewSpace(int header_size,
2421 ScaleFactor element_size,
2422 Register element_count,
2423 Register result,
2424 Register result_end,
2425 Register scratch,
2426 Label* gc_required,
2427 AllocationFlags flags) {
2428 ASSERT(!result.is(result_end));
2429
2430 // Load address of new object into result.
2431 LoadAllocationTopHelper(result, result_end, scratch, flags);
2432
2433 // Calculate new top and bail out if new space is exhausted.
2434 ExternalReference new_space_allocation_limit =
2435 ExternalReference::new_space_allocation_limit_address();
2436 lea(result_end, Operand(result, element_count, element_size, header_size));
2437 movq(kScratchRegister, new_space_allocation_limit);
2438 cmpq(result_end, Operand(kScratchRegister, 0));
2439 j(above, gc_required);
2440
2441 // Update allocation top.
2442 UpdateAllocationTopHelper(result_end, scratch);
2443
2444 // Tag the result if requested.
2445 if ((flags & TAG_OBJECT) != 0) {
2446 addq(result, Immediate(kHeapObjectTag));
2447 }
2448}
2449
2450
2451void MacroAssembler::AllocateInNewSpace(Register object_size,
2452 Register result,
2453 Register result_end,
2454 Register scratch,
2455 Label* gc_required,
2456 AllocationFlags flags) {
2457 // Load address of new object into result.
2458 LoadAllocationTopHelper(result, result_end, scratch, flags);
2459
2460 // Calculate new top and bail out if new space is exhausted.
2461 ExternalReference new_space_allocation_limit =
2462 ExternalReference::new_space_allocation_limit_address();
2463 if (!object_size.is(result_end)) {
2464 movq(result_end, object_size);
2465 }
2466 addq(result_end, result);
2467 movq(kScratchRegister, new_space_allocation_limit);
2468 cmpq(result_end, Operand(kScratchRegister, 0));
2469 j(above, gc_required);
2470
2471 // Update allocation top.
2472 UpdateAllocationTopHelper(result_end, scratch);
2473
2474 // Tag the result if requested.
2475 if ((flags & TAG_OBJECT) != 0) {
2476 addq(result, Immediate(kHeapObjectTag));
2477 }
2478}
2479
2480
2481void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2482 ExternalReference new_space_allocation_top =
2483 ExternalReference::new_space_allocation_top_address();
2484
2485 // Make sure the object has no tag before resetting top.
2486 and_(object, Immediate(~kHeapObjectTagMask));
2487 movq(kScratchRegister, new_space_allocation_top);
2488#ifdef DEBUG
2489 cmpq(object, Operand(kScratchRegister, 0));
2490 Check(below, "Undo allocation of non allocated memory");
2491#endif
2492 movq(Operand(kScratchRegister, 0), object);
2493}
2494
2495
Steve Block3ce2e202009-11-05 08:53:23 +00002496void MacroAssembler::AllocateHeapNumber(Register result,
2497 Register scratch,
2498 Label* gc_required) {
2499 // Allocate heap number in new space.
2500 AllocateInNewSpace(HeapNumber::kSize,
2501 result,
2502 scratch,
2503 no_reg,
2504 gc_required,
2505 TAG_OBJECT);
2506
2507 // Set the map.
2508 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2509 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2510}
2511
2512
Leon Clarkee46be812010-01-19 14:06:41 +00002513void MacroAssembler::AllocateTwoByteString(Register result,
2514 Register length,
2515 Register scratch1,
2516 Register scratch2,
2517 Register scratch3,
2518 Label* gc_required) {
2519 // Calculate the number of bytes needed for the characters in the string while
2520 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002521 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
2522 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002523 ASSERT(kShortSize == 2);
2524 // scratch1 = length * 2 + kObjectAlignmentMask.
Steve Block6ded16b2010-05-10 14:33:55 +01002525 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
2526 kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002527 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002528 if (kHeaderAlignment > 0) {
2529 subq(scratch1, Immediate(kHeaderAlignment));
2530 }
Leon Clarkee46be812010-01-19 14:06:41 +00002531
2532 // Allocate two byte string in new space.
2533 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2534 times_1,
2535 scratch1,
2536 result,
2537 scratch2,
2538 scratch3,
2539 gc_required,
2540 TAG_OBJECT);
2541
2542 // Set the map, length and hash field.
2543 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2544 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002545 Integer32ToSmi(scratch1, length);
2546 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002547 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00002548 Immediate(String::kEmptyHashField));
2549}
2550
2551
2552void MacroAssembler::AllocateAsciiString(Register result,
2553 Register length,
2554 Register scratch1,
2555 Register scratch2,
2556 Register scratch3,
2557 Label* gc_required) {
2558 // Calculate the number of bytes needed for the characters in the string while
2559 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002560 const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
2561 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002562 movl(scratch1, length);
2563 ASSERT(kCharSize == 1);
Steve Block6ded16b2010-05-10 14:33:55 +01002564 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002565 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002566 if (kHeaderAlignment > 0) {
2567 subq(scratch1, Immediate(kHeaderAlignment));
2568 }
Leon Clarkee46be812010-01-19 14:06:41 +00002569
2570 // Allocate ascii string in new space.
2571 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2572 times_1,
2573 scratch1,
2574 result,
2575 scratch2,
2576 scratch3,
2577 gc_required,
2578 TAG_OBJECT);
2579
2580 // Set the map, length and hash field.
2581 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2582 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002583 Integer32ToSmi(scratch1, length);
2584 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002585 movq(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00002586 Immediate(String::kEmptyHashField));
2587}
2588
2589
2590void MacroAssembler::AllocateConsString(Register result,
2591 Register scratch1,
2592 Register scratch2,
2593 Label* gc_required) {
2594 // Allocate a cons string object in new space.
2595 AllocateInNewSpace(ConsString::kSize,
2596 result,
2597 scratch1,
2598 scratch2,
2599 gc_required,
2600 TAG_OBJECT);
2601
2602 // Set the map. The other fields are left uninitialized.
2603 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2604 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2605}
2606
2607
2608void MacroAssembler::AllocateAsciiConsString(Register result,
2609 Register scratch1,
2610 Register scratch2,
2611 Label* gc_required) {
2612 // Allocate an ascii cons string object in new space.
2613 AllocateInNewSpace(ConsString::kSize,
2614 result,
2615 scratch1,
2616 scratch2,
2617 gc_required,
2618 TAG_OBJECT);
2619
2620 // Set the map. The other fields are left uninitialized.
2621 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2622 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2623}
2624
2625
Steve Blockd0582a62009-12-15 09:54:21 +00002626void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2627 if (context_chain_length > 0) {
2628 // Move up the chain of contexts to the context containing the slot.
2629 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2630 // Load the function context (which is the incoming, outer context).
Leon Clarkee46be812010-01-19 14:06:41 +00002631 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00002632 for (int i = 1; i < context_chain_length; i++) {
2633 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2634 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2635 }
2636 // The context may be an intermediate context, not a function context.
2637 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2638 } else { // context is the current function context.
2639 // The context may be an intermediate context, not a function context.
2640 movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2641 }
2642}
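// Illustrative sketch (assumed): reading a slot from a context two scopes
// out could look like
//
//   masm->LoadContext(rcx, 2);  // follow two closure links, then FCONTEXT
//   masm->movq(rax, Operand(rcx, Context::SlotOffset(slot_index)));
//
// where slot_index is a hypothetical slot number chosen by the compiler.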
2643
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002644
Leon Clarke4515c472010-02-03 11:58:03 +00002645int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002646 // On Windows 64 stack slots are reserved by the caller for all arguments
2647 // including the ones passed in registers, and space is always allocated for
2648 // the four register arguments even if the function takes fewer than four
2649 // arguments.
2650 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2651 // and the caller does not reserve stack slots for them.
Leon Clarke4515c472010-02-03 11:58:03 +00002652 ASSERT(num_arguments >= 0);
2653#ifdef _WIN64
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002654 static const int kMinimumStackSlots = 4;
2655 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2656 return num_arguments;
Leon Clarke4515c472010-02-03 11:58:03 +00002657#else
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002658 static const int kRegisterPassedArguments = 6;
2659 if (num_arguments < kRegisterPassedArguments) return 0;
2660 return num_arguments - kRegisterPassedArguments;
Leon Clarke4515c472010-02-03 11:58:03 +00002661#endif
Leon Clarke4515c472010-02-03 11:58:03 +00002662}
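// Worked example: for a 5-argument call this returns 5 on Windows (one slot
// per argument, never fewer than four) and 0 on the AMD64 ABI (all five fit
// in registers); for 8 arguments it returns 8 and 2 respectively.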
2663
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002664
Leon Clarke4515c472010-02-03 11:58:03 +00002665void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2666 int frame_alignment = OS::ActivationFrameAlignment();
2667 ASSERT(frame_alignment != 0);
2668 ASSERT(num_arguments >= 0);
2669 // Make stack end at alignment and allocate space for arguments and old rsp.
2670 movq(kScratchRegister, rsp);
2671 ASSERT(IsPowerOf2(frame_alignment));
2672 int argument_slots_on_stack =
2673 ArgumentStackSlotsForCFunctionCall(num_arguments);
2674 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2675 and_(rsp, Immediate(-frame_alignment));
2676 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2677}
2678
2679
2680void MacroAssembler::CallCFunction(ExternalReference function,
2681 int num_arguments) {
2682 movq(rax, function);
2683 CallCFunction(rax, num_arguments);
2684}
2685
2686
2687void MacroAssembler::CallCFunction(Register function, int num_arguments) {
Steve Block6ded16b2010-05-10 14:33:55 +01002688 // Check stack alignment.
2689 if (FLAG_debug_code) {
2690 CheckStackAlignment();
2691 }
2692
Leon Clarke4515c472010-02-03 11:58:03 +00002693 call(function);
2694 ASSERT(OS::ActivationFrameAlignment() != 0);
2695 ASSERT(num_arguments >= 0);
2696 int argument_slots_on_stack =
2697 ArgumentStackSlotsForCFunctionCall(num_arguments);
2698 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2699}
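// Illustrative sketch (assumed, not from this file) of the expected calling
// sequence for a two-argument C function:
//
//   masm->PrepareCallCFunction(2);
//   // ... move the arguments into the first two C argument registers of
//   //     the target ABI (e.g. rdi/rsi on Linux, rcx/rdx on Windows) ...
//   masm->CallCFunction(ref, 2);  // ref: an ExternalReference to the function
//
// PrepareCallCFunction aligns rsp and stashes the old value on the stack;
// CallCFunction restores it after the call, so the two must be paired.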
2700
Steve Blockd0582a62009-12-15 09:54:21 +00002701
Steve Blocka7e24c12009-10-30 11:49:00 +00002702CodePatcher::CodePatcher(byte* address, int size)
2703 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2704 // Create a new macro assembler pointing to the address of the code to patch.
2705 // The size is adjusted with kGap in order for the assembler to generate size
2706 // bytes of instructions without failing with buffer size constraints.
2707 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2708}
2709
2710
2711CodePatcher::~CodePatcher() {
2712 // Indicate that code has changed.
2713 CPU::FlushICache(address_, size_);
2714
2715 // Check that the code was patched as expected.
2716 ASSERT(masm_.pc_ == address_ + size_);
2717 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2718}
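// Illustrative sketch (assumed): patching two bytes at a known code address
// and letting the destructor flush the instruction cache:
//
//   {
//     CodePatcher patcher(address, 2);
//     patcher.masm()->int3();
//     patcher.masm()->nop();
//   }  // ~CodePatcher flushes the icache and checks the emitted size.
//
// The masm() accessor is assumed to expose the embedded MacroAssembler, as
// declared in the corresponding header.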
2719
Steve Blocka7e24c12009-10-30 11:49:00 +00002720} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01002721
2722#endif // V8_TARGET_ARCH_X64