// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  ASSERT(is_int32(~Page::kPageAlignmentMask));
  and_(object,
       Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask)));
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  subq(addr, page_start);
  shr(addr, Immediate(kPointerSizeLog2));
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
  j(below, &fast);

  // We have a large object containing pointers. It must be a FixedArray.

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Load the array length into 'scratch'.
  movl(scratch,
       Operand(page_start,
               Page::kObjectStartOffset + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray), at
  // page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  lea(page_start,
      Operand(page_start, array_length, times_pointer_size,
              Page::kObjectStartOffset + FixedArray::kHeaderSize
              - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions
  bind(&fast);
  bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the smi_index register contains the array index into
// the elements array represented as a smi. Otherwise it can be used as a
// scratch register.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register smi_index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !smi_index.is(rsi));

  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have space
  // for the remembered set bits).
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, smi_index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}
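// Illustrative usage sketch (the register choices and field are hypothetical,
// not taken from this file): after storing a pointer into a fixed field, the
// generated code records the write, e.g.
//   movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rax);
//   RecordWrite(rbx, JSObject::kPropertiesOffset, rax, rcx);
// With a non-zero offset the last register is only used as a scratch register;
// with offset 0 it must hold the element index as a smi (see comment above).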


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register smi_index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);
  }

  // Test that the object address is not in the new space. We cannot
  // set remembered set bits in the new space.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  // We use optimized write barrier code if the word being written to is not in
  // a large object page, or is in the first "page" of a large object page.
  // We make sure that an offset is inside the right limits whether it is
  // tagged or untagged.
  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize - kHeapObjectTag)) {
    // Compute the bit offset in the remembered set, leave it in 'scratch'.
    lea(scratch, Operand(object, offset));
    ASSERT(is_int32(Page::kPageAlignmentMask));
    and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
    shr(scratch, Immediate(kPointerSizeLog2));

    // Compute the page address from the heap object pointer, leave it in
    // 'object' (immediate value is sign extended).
    and_(object, Immediate(~Page::kPageAlignmentMask));

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions
    bts(Operand(object, Page::kRSetOffset), scratch);
  } else {
    Register dst = smi_index;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric.
      SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
      lea(dst, FieldOperand(object,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(object, dst, scratch);
    } else {
      RecordWriteStub stub(object, dst, scratch);
      CallStub(&stub);
    }
  }

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
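  // For example, with a 16-byte activation frame alignment the mask is 15 and
  // the testq below checks that the low four bits of rsp are zero.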
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
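// Illustrative example (the address is hypothetical): if msg lives at
// 0x100001, then p0 == 0x100000, which has the smi tag bit clear and is safe
// to push, and the difference p1 - p0 == 1 travels separately as
// Smi::FromInt(1), so the runtime can reconstruct the original, possibly
// unaligned, pointer.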


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  movq(rax, Immediate(num_arguments));
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));

  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  movq(rdi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  movq(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    movq(target, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    movq(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmpq(target, Operand(rsp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  lea(target, FieldOperand(target, Code::kHeaderSize));
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xor_(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (x == 0) {
    xor_(kScratchRegister, kScratchRegister);
    movq(dst, kScratchRegister);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

static int kSmiShift = kSmiTagSize + kSmiShiftSize;
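// Note on the representation: with kSmiTagSize == 1 and kSmiShiftSize == 31 on
// x64, kSmiShift is 32, so a smi keeps its 32-bit value in the upper half of
// the word and the lower 32 bits (tag plus padding) are zero; for example, the
// smi for 5 is 0x0000000500000000. The shl/sar/shr by kSmiShift below rely on
// this layout.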

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (src->value() == 0) {
    // Zero is the only tagged long smi that fits in a 32-bit immediate.
    cmpq(dst, Immediate(0));
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckPositiveSmi(Register src) {
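  // Rotating left by one moves the sign bit into bit 0 and the smi tag bit
  // into bit 1, so a single test of the low two bits checks "non-negative smi".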
  ASSERT_EQ(0, kSmiTag);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}



Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  andl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  cmpq(kScratchRegister, Immediate(1));
  return equal;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testq(src, Immediate(0x80000000));
  return zero;
}


void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible.
    if (dst.is(src1)) {
      addq(dst, src2);
    } else {
      movq(dst, src1);
      addq(dst, src2);
    }
    Assert(no_overflow, "Smi addition overflow");
  } else if (dst.is(src1)) {
    addq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    subq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Operand const& src2,
                            Label* on_not_smi_result) {
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}

void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  Move(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    addq(dst, src);
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
    Label result_ok;
    j(no_overflow, &result_ok);
    subq(dst, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&result_ok);
  } else {
    Move(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
  } else {
    // Subtract by adding the negative, to do it in two operations.
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
    } else {
      Move(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
    Label sub_success;
    j(no_overflow, &sub_success);
    addq(src, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&sub_success);
  } else {
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
      j(overflow, on_not_smi_result);
    } else {
      Move(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
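  // (The testl below is zero only when the untagged dividend is 0 or
  // Smi::kMinValue, which are exactly the values that need these checks.)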
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    and_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    or_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    xor_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
                                                  Register src,
                                                  int shift_value,
                                                  Label* on_not_smi_result) {
  // Logic right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  Label result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // Shift amount specified by lower 5 bits, not six as the shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  Label result_ok;
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift is rcx modulo 0x1f + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero then both are smis.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then scratch register all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}

SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}

SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
  ASSERT_EQ(0, kSmiTag);
  Condition smi = CheckSmi(src);
  j(smi, on_smi);
}


void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi);
}


void MacroAssembler::JumpIfNotPositiveSmi(Register src,
                                          Label* on_not_positive_smi) {
  Condition positive_smi = CheckPositiveSmi(src);
  j(NegateCondition(positive_smi), on_not_positive_smi);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals) {
  SmiCompare(src, constant);
  j(equal, on_equals);
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
                                      Label* on_not_both_smi) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
                                              Label* on_not_both_smi) {
  Condition both_smi = CheckBothPositiveSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}



void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                                         Register second_object,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* on_fail) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1478 cmpl(scratch1,
Leon Clarked91b9f72010-01-27 17:25:45 +00001479 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
Leon Clarkee46be812010-01-19 14:06:41 +00001480 j(not_equal, on_fail);
1481}
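
// Sketch of the combined check above. Both instance types are masked down to
// the (is-not-string | representation | encoding) bits; because
// kFlatAsciiStringMask & (kFlatAsciiStringMask << 3) == 0 (asserted above),
// scratch2 can be scaled by 8 (the times_8 in the lea, i.e. a shift left by
// 3) and added to scratch1 without the two bit fields overlapping. A single
// cmpl against kFlatAsciiStringTag + (kFlatAsciiStringTag << 3) then verifies
// that both strings are flat ascii in one compare instead of two.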
1482
1483
Steve Block6ded16b2010-05-10 14:33:55 +01001484void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1485 Register instance_type,
1486 Register scratch,
1487 Label *failure) {
1488 if (!scratch.is(instance_type)) {
1489 movl(scratch, instance_type);
1490 }
1491
1492 const int kFlatAsciiStringMask =
1493 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1494
1495 andl(scratch, Immediate(kFlatAsciiStringMask));
1496 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1497 j(not_equal, failure);
1498}
1499
1500
1501void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1502 Register first_object_instance_type,
1503 Register second_object_instance_type,
1504 Register scratch1,
1505 Register scratch2,
1506 Label* on_fail) {
1507 // Load instance type for both strings.
1508 movq(scratch1, first_object_instance_type);
1509 movq(scratch2, second_object_instance_type);
1510
1511 // Check that both are flat ascii strings.
1512 ASSERT(kNotStringTag != 0);
1513 const int kFlatAsciiStringMask =
1514 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1515 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1516
1517 andl(scratch1, Immediate(kFlatAsciiStringMask));
1518 andl(scratch2, Immediate(kFlatAsciiStringMask));
1519 // Interleave the bits to check both scratch1 and scratch2 in one test.
1520 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1521 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1522 cmpl(scratch1,
1523 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1524 j(not_equal, on_fail);
1525}
1526
1527
Steve Blocka7e24c12009-10-30 11:49:00 +00001528void MacroAssembler::Move(Register dst, Handle<Object> source) {
1529 ASSERT(!source->IsFailure());
1530 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001531 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001532 } else {
1533 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1534 }
1535}
1536
1537
1538void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001539 ASSERT(!source->IsFailure());
Steve Blocka7e24c12009-10-30 11:49:00 +00001540 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001541 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001542 } else {
1543 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1544 movq(dst, kScratchRegister);
1545 }
1546}
1547
1548
1549void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001550 if (source->IsSmi()) {
1551 SmiCompare(dst, Smi::cast(*source));
1552 } else {
1553 Move(kScratchRegister, source);
1554 cmpq(dst, kScratchRegister);
1555 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001556}
1557
1558
1559void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1560 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001561 SmiCompare(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001562 } else {
1563 ASSERT(source->IsHeapObject());
1564 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1565 cmpq(dst, kScratchRegister);
1566 }
1567}
1568
1569
1570void MacroAssembler::Push(Handle<Object> source) {
1571 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001572 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00001573 } else {
1574 ASSERT(source->IsHeapObject());
1575 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1576 push(kScratchRegister);
1577 }
1578}
1579
1580
1581void MacroAssembler::Push(Smi* source) {
Steve Block3ce2e202009-11-05 08:53:23 +00001582 intptr_t smi = reinterpret_cast<intptr_t>(source);
1583 if (is_int32(smi)) {
1584 push(Immediate(static_cast<int32_t>(smi)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001585 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001586 Set(kScratchRegister, smi);
1587 push(kScratchRegister);
1588 }
1589}
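
// Example of why the is_int32 check above matters (a sketch, assuming the
// smi layout where the payload sits in the upper 32 bits of the word): the
// raw bits of Smi::FromInt(0) are 0 and fit a sign-extended 32-bit
// immediate, while Smi::FromInt(1) is 1 << 32 and has to be materialized in
// kScratchRegister via Set() before being pushed.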
1590
1591
Leon Clarkee46be812010-01-19 14:06:41 +00001592void MacroAssembler::Drop(int stack_elements) {
1593 if (stack_elements > 0) {
1594 addq(rsp, Immediate(stack_elements * kPointerSize));
1595 }
1596}
1597
1598
Steve Block3ce2e202009-11-05 08:53:23 +00001599void MacroAssembler::Test(const Operand& src, Smi* source) {
1600 intptr_t smi = reinterpret_cast<intptr_t>(source);
1601 if (is_int32(smi)) {
1602 testl(src, Immediate(static_cast<int32_t>(smi)));
1603 } else {
1604 Move(kScratchRegister, source);
1605 testq(src, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00001606 }
1607}
1608
1609
1610void MacroAssembler::Jump(ExternalReference ext) {
1611 movq(kScratchRegister, ext);
1612 jmp(kScratchRegister);
1613}
1614
1615
1616void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1617 movq(kScratchRegister, destination, rmode);
1618 jmp(kScratchRegister);
1619}
1620
1621
1622void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
Steve Block3ce2e202009-11-05 08:53:23 +00001623 // TODO(X64): Inline this
1624 jmp(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001625}
1626
1627
1628void MacroAssembler::Call(ExternalReference ext) {
1629 movq(kScratchRegister, ext);
1630 call(kScratchRegister);
1631}
1632
1633
1634void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1635 movq(kScratchRegister, destination, rmode);
1636 call(kScratchRegister);
1637}
1638
1639
1640void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1641 ASSERT(RelocInfo::IsCodeTarget(rmode));
1642 WriteRecordedPositions();
Steve Block3ce2e202009-11-05 08:53:23 +00001643 call(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001644}
1645
1646
1647void MacroAssembler::PushTryHandler(CodeLocation try_location,
1648 HandlerType type) {
1649 // Adjust this code if not the case.
1650 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1651
1652 // The pc (return address) is already on TOS. This code pushes state,
1653 // frame pointer and current handler. Check that they are expected
1654 // next on the stack, in that order.
1655 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1656 StackHandlerConstants::kPCOffset - kPointerSize);
1657 ASSERT_EQ(StackHandlerConstants::kFPOffset,
1658 StackHandlerConstants::kStateOffset - kPointerSize);
1659 ASSERT_EQ(StackHandlerConstants::kNextOffset,
1660 StackHandlerConstants::kFPOffset - kPointerSize);
1661
1662 if (try_location == IN_JAVASCRIPT) {
1663 if (type == TRY_CATCH_HANDLER) {
1664 push(Immediate(StackHandler::TRY_CATCH));
1665 } else {
1666 push(Immediate(StackHandler::TRY_FINALLY));
1667 }
1668 push(rbp);
1669 } else {
1670 ASSERT(try_location == IN_JS_ENTRY);
1671 // The frame pointer does not point to a JS frame so we save NULL
1672 // for rbp. We expect the code throwing an exception to check rbp
1673 // before dereferencing it to restore the context.
1674 push(Immediate(StackHandler::ENTRY));
1675 push(Immediate(0)); // NULL frame pointer.
1676 }
1677 // Save the current handler.
1678 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1679 push(Operand(kScratchRegister, 0));
1680 // Link this handler.
1681 movq(Operand(kScratchRegister, 0), rsp);
1682}
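
// Resulting try-handler layout, lowest address (the new rsp) first, as
// implied by the offset asserts at the top of the function above:
//   rsp + 0 * kPointerSize : next handler   (kNextOffset)
//   rsp + 1 * kPointerSize : frame pointer  (kFPOffset)
//   rsp + 2 * kPointerSize : state          (kStateOffset)
//   rsp + 3 * kPointerSize : return address (kPCOffset)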
1683
1684
Leon Clarkee46be812010-01-19 14:06:41 +00001685void MacroAssembler::PopTryHandler() {
1686 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1687 // Unlink this handler.
1688 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1689 pop(Operand(kScratchRegister, 0));
1690 // Remove the remaining fields.
1691 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1692}
1693
1694
Steve Blocka7e24c12009-10-30 11:49:00 +00001695void MacroAssembler::Ret() {
1696 ret(0);
1697}
1698
1699
1700void MacroAssembler::FCmp() {
Steve Block3ce2e202009-11-05 08:53:23 +00001701 fucomip();
1702 ffree(0);
1703 fincstp();
Steve Blocka7e24c12009-10-30 11:49:00 +00001704}
1705
1706
1707void MacroAssembler::CmpObjectType(Register heap_object,
1708 InstanceType type,
1709 Register map) {
1710 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1711 CmpInstanceType(map, type);
1712}
1713
1714
1715void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1716 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1717 Immediate(static_cast<int8_t>(type)));
1718}
1719
1720
Andrei Popescu31002712010-02-23 13:46:05 +00001721void MacroAssembler::CheckMap(Register obj,
1722 Handle<Map> map,
1723 Label* fail,
1724 bool is_heap_object) {
1725 if (!is_heap_object) {
1726 JumpIfSmi(obj, fail);
1727 }
1728 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1729 j(not_equal, fail);
1730}
1731
1732
Andrei Popescu402d9372010-02-26 13:31:12 +00001733void MacroAssembler::AbortIfNotNumber(Register object, const char* msg) {
1734 Label ok;
1735 Condition is_smi = CheckSmi(object);
1736 j(is_smi, &ok);
1737 Cmp(FieldOperand(object, HeapObject::kMapOffset),
1738 Factory::heap_number_map());
1739 Assert(equal, msg);
1740 bind(&ok);
1741}
1742
1743
Steve Block6ded16b2010-05-10 14:33:55 +01001744void MacroAssembler::AbortIfNotSmi(Register object, const char* msg) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, msg);
1750}
1751
1752
Leon Clarked91b9f72010-01-27 17:25:45 +00001753Condition MacroAssembler::IsObjectStringType(Register heap_object,
1754 Register map,
1755 Register instance_type) {
1756 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00001757 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00001758 ASSERT(kNotStringTag != 0);
1759 testb(instance_type, Immediate(kIsNotStringMask));
1760 return zero;
1761}
1762
1763
Steve Blocka7e24c12009-10-30 11:49:00 +00001764void MacroAssembler::TryGetFunctionPrototype(Register function,
1765 Register result,
1766 Label* miss) {
1767 // Check that the receiver isn't a smi.
1768 testl(function, Immediate(kSmiTagMask));
1769 j(zero, miss);
1770
1771 // Check that the function really is a function.
1772 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1773 j(not_equal, miss);
1774
1775 // Make sure that the function has an instance prototype.
1776 Label non_instance;
1777 testb(FieldOperand(result, Map::kBitFieldOffset),
1778 Immediate(1 << Map::kHasNonInstancePrototype));
1779 j(not_zero, &non_instance);
1780
1781 // Get the prototype or initial map from the function.
1782 movq(result,
1783 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1784
1785 // If the prototype or initial map is the hole, don't return it and
1786 // simply miss the cache instead. This will allow us to allocate a
1787 // prototype object on-demand in the runtime system.
1788 CompareRoot(result, Heap::kTheHoleValueRootIndex);
1789 j(equal, miss);
1790
1791 // If the function does not have an initial map, we're done.
1792 Label done;
1793 CmpObjectType(result, MAP_TYPE, kScratchRegister);
1794 j(not_equal, &done);
1795
1796 // Get the prototype from the initial map.
1797 movq(result, FieldOperand(result, Map::kPrototypeOffset));
1798 jmp(&done);
1799
1800 // Non-instance prototype: Fetch prototype from constructor field
1801 // in initial map.
1802 bind(&non_instance);
1803 movq(result, FieldOperand(result, Map::kConstructorOffset));
1804
1805 // All done.
1806 bind(&done);
1807}
1808
1809
1810void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1811 if (FLAG_native_code_counters && counter->Enabled()) {
1812 movq(kScratchRegister, ExternalReference(counter));
1813 movl(Operand(kScratchRegister, 0), Immediate(value));
1814 }
1815}
1816
1817
1818void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1819 ASSERT(value > 0);
1820 if (FLAG_native_code_counters && counter->Enabled()) {
1821 movq(kScratchRegister, ExternalReference(counter));
1822 Operand operand(kScratchRegister, 0);
1823 if (value == 1) {
1824 incl(operand);
1825 } else {
1826 addl(operand, Immediate(value));
1827 }
1828 }
1829}
1830
1831
1832void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1833 ASSERT(value > 0);
1834 if (FLAG_native_code_counters && counter->Enabled()) {
1835 movq(kScratchRegister, ExternalReference(counter));
1836 Operand operand(kScratchRegister, 0);
1837 if (value == 1) {
1838 decl(operand);
1839 } else {
1840 subl(operand, Immediate(value));
1841 }
1842 }
1843}
1844
Steve Blocka7e24c12009-10-30 11:49:00 +00001845#ifdef ENABLE_DEBUGGER_SUPPORT
1846
1847void MacroAssembler::PushRegistersFromMemory(RegList regs) {
1848 ASSERT((regs & ~kJSCallerSaved) == 0);
1849 // Push the content of the memory location to the stack.
1850 for (int i = 0; i < kNumJSCallerSaved; i++) {
1851 int r = JSCallerSavedCode(i);
1852 if ((regs & (1 << r)) != 0) {
1853 ExternalReference reg_addr =
1854 ExternalReference(Debug_Address::Register(i));
1855 movq(kScratchRegister, reg_addr);
1856 push(Operand(kScratchRegister, 0));
1857 }
1858 }
1859}
1860
Steve Block3ce2e202009-11-05 08:53:23 +00001861
Steve Blocka7e24c12009-10-30 11:49:00 +00001862void MacroAssembler::SaveRegistersToMemory(RegList regs) {
1863 ASSERT((regs & ~kJSCallerSaved) == 0);
1864 // Copy the content of registers to memory location.
1865 for (int i = 0; i < kNumJSCallerSaved; i++) {
1866 int r = JSCallerSavedCode(i);
1867 if ((regs & (1 << r)) != 0) {
1868 Register reg = { r };
1869 ExternalReference reg_addr =
1870 ExternalReference(Debug_Address::Register(i));
1871 movq(kScratchRegister, reg_addr);
1872 movq(Operand(kScratchRegister, 0), reg);
1873 }
1874 }
1875}
1876
1877
1878void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
1879 ASSERT((regs & ~kJSCallerSaved) == 0);
1880 // Copy the content of memory location to registers.
1881 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1882 int r = JSCallerSavedCode(i);
1883 if ((regs & (1 << r)) != 0) {
1884 Register reg = { r };
1885 ExternalReference reg_addr =
1886 ExternalReference(Debug_Address::Register(i));
1887 movq(kScratchRegister, reg_addr);
1888 movq(reg, Operand(kScratchRegister, 0));
1889 }
1890 }
1891}
1892
1893
1894void MacroAssembler::PopRegistersToMemory(RegList regs) {
1895 ASSERT((regs & ~kJSCallerSaved) == 0);
1896 // Pop the content from the stack to the memory location.
1897 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1898 int r = JSCallerSavedCode(i);
1899 if ((regs & (1 << r)) != 0) {
1900 ExternalReference reg_addr =
1901 ExternalReference(Debug_Address::Register(i));
1902 movq(kScratchRegister, reg_addr);
1903 pop(Operand(kScratchRegister, 0));
1904 }
1905 }
1906}
1907
1908
1909void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
1910 Register scratch,
1911 RegList regs) {
1912 ASSERT(!scratch.is(kScratchRegister));
1913 ASSERT(!base.is(kScratchRegister));
1914 ASSERT(!base.is(scratch));
1915 ASSERT((regs & ~kJSCallerSaved) == 0);
1916 // Copy the content of the stack to the memory location and adjust base.
1917 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1918 int r = JSCallerSavedCode(i);
1919 if ((regs & (1 << r)) != 0) {
1920 movq(scratch, Operand(base, 0));
1921 ExternalReference reg_addr =
1922 ExternalReference(Debug_Address::Register(i));
1923 movq(kScratchRegister, reg_addr);
1924 movq(Operand(kScratchRegister, 0), scratch);
1925 lea(base, Operand(base, kPointerSize));
1926 }
1927 }
1928}
1929
Andrei Popescu402d9372010-02-26 13:31:12 +00001930void MacroAssembler::DebugBreak() {
1931 ASSERT(allow_stub_calls());
1932 xor_(rax, rax); // no arguments
1933 movq(rbx, ExternalReference(Runtime::kDebugBreak));
1934 CEntryStub ces(1);
1935 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Steve Blocka7e24c12009-10-30 11:49:00 +00001936}
Andrei Popescu402d9372010-02-26 13:31:12 +00001937#endif // ENABLE_DEBUGGER_SUPPORT
Steve Blocka7e24c12009-10-30 11:49:00 +00001938
1939
1940void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1941 const ParameterCount& actual,
1942 Handle<Code> code_constant,
1943 Register code_register,
1944 Label* done,
1945 InvokeFlag flag) {
1946 bool definitely_matches = false;
1947 Label invoke;
1948 if (expected.is_immediate()) {
1949 ASSERT(actual.is_immediate());
1950 if (expected.immediate() == actual.immediate()) {
1951 definitely_matches = true;
1952 } else {
1953 movq(rax, Immediate(actual.immediate()));
1954 if (expected.immediate() ==
Steve Block3ce2e202009-11-05 08:53:23 +00001955 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001956 // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaptation code by making it look
1958 // like we have a match between expected and actual number of
1959 // arguments.
1960 definitely_matches = true;
1961 } else {
1962 movq(rbx, Immediate(expected.immediate()));
1963 }
1964 }
1965 } else {
1966 if (actual.is_immediate()) {
1967 // Expected is in register, actual is immediate. This is the
1968 // case when we invoke function values without going through the
1969 // IC mechanism.
1970 cmpq(expected.reg(), Immediate(actual.immediate()));
1971 j(equal, &invoke);
1972 ASSERT(expected.reg().is(rbx));
1973 movq(rax, Immediate(actual.immediate()));
1974 } else if (!expected.reg().is(actual.reg())) {
1975 // Both expected and actual are in (different) registers. This
1976 // is the case when we invoke functions using call and apply.
1977 cmpq(expected.reg(), actual.reg());
1978 j(equal, &invoke);
1979 ASSERT(actual.reg().is(rax));
1980 ASSERT(expected.reg().is(rbx));
1981 }
1982 }
1983
1984 if (!definitely_matches) {
1985 Handle<Code> adaptor =
1986 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
1987 if (!code_constant.is_null()) {
1988 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
1989 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1990 } else if (!code_register.is(rdx)) {
1991 movq(rdx, code_register);
1992 }
1993
1994 if (flag == CALL_FUNCTION) {
1995 Call(adaptor, RelocInfo::CODE_TARGET);
1996 jmp(done);
1997 } else {
1998 Jump(adaptor, RelocInfo::CODE_TARGET);
1999 }
2000 bind(&invoke);
2001 }
2002}
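
// Register convention implied by the prologue above (a summary sketch, not
// an exhaustive contract): rax carries the actual argument count, rbx the
// expected count, and rdx the code entry handed to the
// ArgumentsAdaptorTrampoline when the two counts differ.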
2003
2004
2005void MacroAssembler::InvokeCode(Register code,
2006 const ParameterCount& expected,
2007 const ParameterCount& actual,
2008 InvokeFlag flag) {
2009 Label done;
2010 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
2011 if (flag == CALL_FUNCTION) {
2012 call(code);
2013 } else {
2014 ASSERT(flag == JUMP_FUNCTION);
2015 jmp(code);
2016 }
2017 bind(&done);
2018}
2019
2020
2021void MacroAssembler::InvokeCode(Handle<Code> code,
2022 const ParameterCount& expected,
2023 const ParameterCount& actual,
2024 RelocInfo::Mode rmode,
2025 InvokeFlag flag) {
2026 Label done;
2027 Register dummy = rax;
2028 InvokePrologue(expected, actual, code, dummy, &done, flag);
2029 if (flag == CALL_FUNCTION) {
2030 Call(code, rmode);
2031 } else {
2032 ASSERT(flag == JUMP_FUNCTION);
2033 Jump(code, rmode);
2034 }
2035 bind(&done);
2036}
2037
2038
2039void MacroAssembler::InvokeFunction(Register function,
2040 const ParameterCount& actual,
2041 InvokeFlag flag) {
2042 ASSERT(function.is(rdi));
2043 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2044 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
2045 movsxlq(rbx,
2046 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
2047 movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
2048 // Advances rdx to the end of the Code object header, to the start of
2049 // the executable code.
2050 lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
2051
2052 ParameterCount expected(rbx);
2053 InvokeCode(rdx, expected, actual, flag);
2054}
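
// Hypothetical usage sketch (the argument count is illustrative, not taken
// from this file): invoking a JSFunction already loaded into rdi with two
// arguments on the stack.
//   ParameterCount actual(2);
//   masm->InvokeFunction(rdi, actual, CALL_FUNCTION);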
2055
2056
Andrei Popescu402d9372010-02-26 13:31:12 +00002057void MacroAssembler::InvokeFunction(JSFunction* function,
2058 const ParameterCount& actual,
2059 InvokeFlag flag) {
2060 ASSERT(function->is_compiled());
2061 // Get the function and setup the context.
2062 Move(rdi, Handle<JSFunction>(function));
2063 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2064
2065 // Invoke the cached code.
2066 Handle<Code> code(function->code());
2067 ParameterCount expected(function->shared()->formal_parameter_count());
2068 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
2069}
2070
2071
Steve Blocka7e24c12009-10-30 11:49:00 +00002072void MacroAssembler::EnterFrame(StackFrame::Type type) {
2073 push(rbp);
2074 movq(rbp, rsp);
2075 push(rsi); // Context.
Steve Block3ce2e202009-11-05 08:53:23 +00002076 Push(Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002077 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2078 push(kScratchRegister);
2079 if (FLAG_debug_code) {
2080 movq(kScratchRegister,
2081 Factory::undefined_value(),
2082 RelocInfo::EMBEDDED_OBJECT);
2083 cmpq(Operand(rsp, 0), kScratchRegister);
2084 Check(not_equal, "code object not properly patched");
2085 }
2086}
2087
2088
2089void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2090 if (FLAG_debug_code) {
Steve Block3ce2e202009-11-05 08:53:23 +00002091 Move(kScratchRegister, Smi::FromInt(type));
Steve Blocka7e24c12009-10-30 11:49:00 +00002092 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2093 Check(equal, "stack frame types must match");
2094 }
2095 movq(rsp, rbp);
2096 pop(rbp);
2097}
2098
2099
Steve Blockd0582a62009-12-15 09:54:21 +00002100void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002101 // Setup the frame structure on the stack.
2102 // All constants are relative to the frame pointer of the exit frame.
2103 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2104 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2105 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2106 push(rbp);
2107 movq(rbp, rsp);
2108
  // Reserve room for entry stack pointer and push the code object.
Steve Block3ce2e202009-11-05 08:53:23 +00002110 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +00002111 push(Immediate(0)); // Saved entry sp, patched before call.
2112 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister); // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +00002114
2115 // Save the frame pointer and the context in top.
2116 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2117 ExternalReference context_address(Top::k_context_address);
2118 movq(r14, rax); // Backup rax before we use it.
2119
2120 movq(rax, rbp);
2121 store_rax(c_entry_fp_address);
2122 movq(rax, rsi);
2123 store_rax(context_address);
2124
2125 // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
2126 // so it must be retained across the C-call.
2127 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
2128 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
2129
2130#ifdef ENABLE_DEBUGGER_SUPPORT
2131 // Save the state of all registers to the stack from the memory
2132 // location. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00002133 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002134 // TODO(1243899): This should be symmetric to
2135 // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
2136 // correct here, but computed for the other call. Very error
2137 // prone! FIX THIS. Actually there are deeper problems with
2138 // register saving than this asymmetry (see the bug report
2139 // associated with this issue).
2140 PushRegistersFromMemory(kJSCallerSaved);
2141 }
2142#endif
2143
2144#ifdef _WIN64
2145 // Reserve space on stack for result and argument structures, if necessary.
2146 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
2147 // Reserve space for the Arguments object. The Windows 64-bit ABI
2148 // requires us to pass this structure as a pointer to its location on
2149 // the stack. The structure contains 2 values.
2150 int argument_stack_space = 2 * kPointerSize;
  // We also need backing space for 4 parameters, even though
  // we only pass one or two parameters, and they are passed in registers.
2153 int argument_mirror_space = 4 * kPointerSize;
2154 int total_stack_space =
2155 argument_mirror_space + argument_stack_space + result_stack_space;
2156 subq(rsp, Immediate(total_stack_space));
2157#endif
2158
2159 // Get the required frame alignment for the OS.
2160 static const int kFrameAlignment = OS::ActivationFrameAlignment();
2161 if (kFrameAlignment > 0) {
2162 ASSERT(IsPowerOf2(kFrameAlignment));
2163 movq(kScratchRegister, Immediate(-kFrameAlignment));
2164 and_(rsp, kScratchRegister);
2165 }
2166
2167 // Patch the saved entry sp.
2168 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2169}
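
// Exit frame layout relative to rbp after the code above (a sketch derived
// from the asserts at the start of the function and the push order; the
// code-object slot offset is implied rather than asserted here):
//   rbp + 2 * kPointerSize : caller's stack (kCallerSPDisplacement)
//   rbp + 1 * kPointerSize : caller's return address
//   rbp + 0                : saved caller rbp
//   rbp - 1 * kPointerSize : saved entry sp (patched at the end)
//   rbp - 2 * kPointerSize : code object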
2170
2171
Steve Blockd0582a62009-12-15 09:54:21 +00002172void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002173 // Registers:
2174 // r15 : argv
2175#ifdef ENABLE_DEBUGGER_SUPPORT
2176 // Restore the memory copy of the registers by digging them out from
2177 // the stack. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +00002178 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002179 // It's okay to clobber register rbx below because we don't need
2180 // the function pointer after this.
2181 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
Steve Blockd0582a62009-12-15 09:54:21 +00002182 int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002183 lea(rbx, Operand(rbp, kOffset));
2184 CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
2185 }
2186#endif
2187
2188 // Get the return address from the stack and restore the frame pointer.
2189 movq(rcx, Operand(rbp, 1 * kPointerSize));
2190 movq(rbp, Operand(rbp, 0 * kPointerSize));
2191
Steve Blocka7e24c12009-10-30 11:49:00 +00002192 // Pop everything up to and including the arguments and the receiver
2193 // from the caller stack.
2194 lea(rsp, Operand(r15, 1 * kPointerSize));
2195
2196 // Restore current context from top and clear it in debug mode.
2197 ExternalReference context_address(Top::k_context_address);
2198 movq(kScratchRegister, context_address);
2199 movq(rsi, Operand(kScratchRegister, 0));
2200#ifdef DEBUG
2201 movq(Operand(kScratchRegister, 0), Immediate(0));
2202#endif
2203
2204 // Push the return address to get ready to return.
2205 push(rcx);
2206
2207 // Clear the top frame.
2208 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2209 movq(kScratchRegister, c_entry_fp_address);
2210 movq(Operand(kScratchRegister, 0), Immediate(0));
2211}
2212
2213
Steve Block3ce2e202009-11-05 08:53:23 +00002214Register MacroAssembler::CheckMaps(JSObject* object,
2215 Register object_reg,
2216 JSObject* holder,
2217 Register holder_reg,
Steve Blocka7e24c12009-10-30 11:49:00 +00002218 Register scratch,
Steve Block6ded16b2010-05-10 14:33:55 +01002219 int save_at_depth,
Steve Blocka7e24c12009-10-30 11:49:00 +00002220 Label* miss) {
2221 // Make sure there's no overlap between scratch and the other
2222 // registers.
2223 ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));
2224
2225 // Keep track of the current object in register reg. On the first
2226 // iteration, reg is an alias for object_reg, on later iterations,
2227 // it is an alias for holder_reg.
2228 Register reg = object_reg;
Steve Block6ded16b2010-05-10 14:33:55 +01002229 int depth = 0;
2230
2231 if (save_at_depth == depth) {
2232 movq(Operand(rsp, kPointerSize), object_reg);
2233 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002234
2235 // Check the maps in the prototype chain.
2236 // Traverse the prototype chain from the object and do map checks.
2237 while (object != holder) {
2238 depth++;
2239
2240 // Only global objects and objects that do not require access
2241 // checks are allowed in stubs.
2242 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2243
2244 JSObject* prototype = JSObject::cast(object->GetPrototype());
2245 if (Heap::InNewSpace(prototype)) {
2246 // Get the map of the current object.
2247 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2248 Cmp(scratch, Handle<Map>(object->map()));
2249 // Branch on the result of the map check.
2250 j(not_equal, miss);
2251 // Check access rights to the global object. This has to happen
2252 // after the map check so that we know that the object is
2253 // actually a global object.
2254 if (object->IsJSGlobalProxy()) {
2255 CheckAccessGlobalProxy(reg, scratch, miss);
2256
2257 // Restore scratch register to be the map of the object.
2258 // We load the prototype from the map in the scratch register.
2259 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2260 }
2261 // The prototype is in new space; we cannot store a reference
2262 // to it in the code. Load it from the map.
2263 reg = holder_reg; // from now the object is in holder_reg
2264 movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));
2265
2266 } else {
2267 // Check the map of the current object.
2268 Cmp(FieldOperand(reg, HeapObject::kMapOffset),
2269 Handle<Map>(object->map()));
2270 // Branch on the result of the map check.
2271 j(not_equal, miss);
2272 // Check access rights to the global object. This has to happen
2273 // after the map check so that we know that the object is
2274 // actually a global object.
2275 if (object->IsJSGlobalProxy()) {
2276 CheckAccessGlobalProxy(reg, scratch, miss);
2277 }
2278 // The prototype is in old space; load it directly.
2279 reg = holder_reg; // from now the object is in holder_reg
2280 Move(reg, Handle<JSObject>(prototype));
2281 }
2282
Steve Block6ded16b2010-05-10 14:33:55 +01002283 if (save_at_depth == depth) {
2284 movq(Operand(rsp, kPointerSize), reg);
2285 }
2286
Steve Blocka7e24c12009-10-30 11:49:00 +00002287 // Go to the next object in the prototype chain.
2288 object = prototype;
2289 }
2290
2291 // Check the holder map.
Steve Block3ce2e202009-11-05 08:53:23 +00002292 Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002293 j(not_equal, miss);
2294
2295 // Log the check depth.
Steve Block6ded16b2010-05-10 14:33:55 +01002296 LOG(IntEvent("check-maps-depth", depth + 1));
Steve Blocka7e24c12009-10-30 11:49:00 +00002297
2298 // Perform security check for access to the global object and return
2299 // the holder register.
2300 ASSERT(object == holder);
2301 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2302 if (object->IsJSGlobalProxy()) {
2303 CheckAccessGlobalProxy(reg, scratch, miss);
2304 }
2305 return reg;
2306}
2307
2308
Steve Blocka7e24c12009-10-30 11:49:00 +00002309void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2310 Register scratch,
2311 Label* miss) {
2312 Label same_contexts;
2313
2314 ASSERT(!holder_reg.is(scratch));
2315 ASSERT(!scratch.is(kScratchRegister));
2316 // Load current lexical context from the stack frame.
2317 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2318
2319 // When generating debug code, make sure the lexical context is set.
2320 if (FLAG_debug_code) {
2321 cmpq(scratch, Immediate(0));
2322 Check(not_equal, "we should not have an empty lexical context");
2323 }
2324 // Load the global context of the current context.
2325 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2326 movq(scratch, FieldOperand(scratch, offset));
2327 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2328
2329 // Check the context is a global context.
2330 if (FLAG_debug_code) {
2331 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2332 Factory::global_context_map());
2333 Check(equal, "JSGlobalObject::global_context should be a global context.");
2334 }
2335
2336 // Check if both contexts are the same.
2337 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2338 j(equal, &same_contexts);
2339
2340 // Compare security tokens.
2341 // Check that the security token in the calling global object is
2342 // compatible with the security token in the receiving global
2343 // object.
2344
2345 // Check the context is a global context.
2346 if (FLAG_debug_code) {
2347 // Preserve original value of holder_reg.
2348 push(holder_reg);
2349 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2350 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2351 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2352
2353 // Read the first word and compare to global_context_map(),
2354 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2355 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2356 Check(equal, "JSGlobalObject::global_context should be a global context.");
2357 pop(holder_reg);
2358 }
2359
2360 movq(kScratchRegister,
2361 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00002362 int token_offset =
2363 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +00002364 movq(scratch, FieldOperand(scratch, token_offset));
2365 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2366 j(not_equal, miss);
2367
2368 bind(&same_contexts);
2369}
2370
2371
2372void MacroAssembler::LoadAllocationTopHelper(Register result,
2373 Register result_end,
2374 Register scratch,
2375 AllocationFlags flags) {
2376 ExternalReference new_space_allocation_top =
2377 ExternalReference::new_space_allocation_top_address();
2378
2379 // Just return if allocation top is already known.
2380 if ((flags & RESULT_CONTAINS_TOP) != 0) {
2381 // No use of scratch if allocation top is provided.
Steve Block6ded16b2010-05-10 14:33:55 +01002382 ASSERT(!scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00002383#ifdef DEBUG
2384 // Assert that result actually contains top on entry.
2385 movq(kScratchRegister, new_space_allocation_top);
2386 cmpq(result, Operand(kScratchRegister, 0));
2387 Check(equal, "Unexpected allocation top");
2388#endif
2389 return;
2390 }
2391
Steve Block6ded16b2010-05-10 14:33:55 +01002392 // Move address of new object to result. Use scratch register if available,
2393 // and keep address in scratch until call to UpdateAllocationTopHelper.
2394 if (scratch.is_valid()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002395 ASSERT(!scratch.is(result_end));
2396 movq(scratch, new_space_allocation_top);
2397 movq(result, Operand(scratch, 0));
Steve Block6ded16b2010-05-10 14:33:55 +01002398 } else if (result.is(rax)) {
2399 load_rax(new_space_allocation_top);
2400 } else {
2401 movq(kScratchRegister, new_space_allocation_top);
2402 movq(result, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002403 }
2404}
2405
2406
2407void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2408 Register scratch) {
Steve Blockd0582a62009-12-15 09:54:21 +00002409 if (FLAG_debug_code) {
2410 testq(result_end, Immediate(kObjectAlignmentMask));
2411 Check(zero, "Unaligned allocation in new space");
2412 }
2413
Steve Blocka7e24c12009-10-30 11:49:00 +00002414 ExternalReference new_space_allocation_top =
2415 ExternalReference::new_space_allocation_top_address();
2416
2417 // Update new top.
2418 if (result_end.is(rax)) {
2419 // rax can be stored directly to a memory location.
2420 store_rax(new_space_allocation_top);
2421 } else {
2422 // Register required - use scratch provided if available.
Steve Block6ded16b2010-05-10 14:33:55 +01002423 if (scratch.is_valid()) {
2424 movq(Operand(scratch, 0), result_end);
2425 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +00002426 movq(kScratchRegister, new_space_allocation_top);
2427 movq(Operand(kScratchRegister, 0), result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00002428 }
2429 }
2430}
2431
2432
2433void MacroAssembler::AllocateInNewSpace(int object_size,
2434 Register result,
2435 Register result_end,
2436 Register scratch,
2437 Label* gc_required,
2438 AllocationFlags flags) {
2439 ASSERT(!result.is(result_end));
2440
2441 // Load address of new object into result.
2442 LoadAllocationTopHelper(result, result_end, scratch, flags);
2443
2444 // Calculate new top and bail out if new space is exhausted.
2445 ExternalReference new_space_allocation_limit =
2446 ExternalReference::new_space_allocation_limit_address();
Steve Block6ded16b2010-05-10 14:33:55 +01002447
2448 Register top_reg = result_end.is_valid() ? result_end : result;
2449
2450 if (top_reg.is(result)) {
2451 addq(top_reg, Immediate(object_size));
2452 } else {
2453 lea(top_reg, Operand(result, object_size));
2454 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002455 movq(kScratchRegister, new_space_allocation_limit);
Steve Block6ded16b2010-05-10 14:33:55 +01002456 cmpq(top_reg, Operand(kScratchRegister, 0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002457 j(above, gc_required);
2458
2459 // Update allocation top.
Steve Block6ded16b2010-05-10 14:33:55 +01002460 UpdateAllocationTopHelper(top_reg, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00002461
Steve Block6ded16b2010-05-10 14:33:55 +01002462 if (top_reg.is(result)) {
2463 if ((flags & TAG_OBJECT) != 0) {
2464 subq(result, Immediate(object_size - kHeapObjectTag));
2465 } else {
2466 subq(result, Immediate(object_size));
2467 }
2468 } else if ((flags & TAG_OBJECT) != 0) {
2469 // Tag the result if requested.
Steve Blocka7e24c12009-10-30 11:49:00 +00002470 addq(result, Immediate(kHeapObjectTag));
2471 }
2472}
2473
2474
2475void MacroAssembler::AllocateInNewSpace(int header_size,
2476 ScaleFactor element_size,
2477 Register element_count,
2478 Register result,
2479 Register result_end,
2480 Register scratch,
2481 Label* gc_required,
2482 AllocationFlags flags) {
2483 ASSERT(!result.is(result_end));
2484
2485 // Load address of new object into result.
2486 LoadAllocationTopHelper(result, result_end, scratch, flags);
2487
2488 // Calculate new top and bail out if new space is exhausted.
2489 ExternalReference new_space_allocation_limit =
2490 ExternalReference::new_space_allocation_limit_address();
2491 lea(result_end, Operand(result, element_count, element_size, header_size));
2492 movq(kScratchRegister, new_space_allocation_limit);
2493 cmpq(result_end, Operand(kScratchRegister, 0));
2494 j(above, gc_required);
2495
2496 // Update allocation top.
2497 UpdateAllocationTopHelper(result_end, scratch);
2498
2499 // Tag the result if requested.
2500 if ((flags & TAG_OBJECT) != 0) {
2501 addq(result, Immediate(kHeapObjectTag));
2502 }
2503}
2504
2505
2506void MacroAssembler::AllocateInNewSpace(Register object_size,
2507 Register result,
2508 Register result_end,
2509 Register scratch,
2510 Label* gc_required,
2511 AllocationFlags flags) {
2512 // Load address of new object into result.
2513 LoadAllocationTopHelper(result, result_end, scratch, flags);
2514
2515 // Calculate new top and bail out if new space is exhausted.
2516 ExternalReference new_space_allocation_limit =
2517 ExternalReference::new_space_allocation_limit_address();
2518 if (!object_size.is(result_end)) {
2519 movq(result_end, object_size);
2520 }
2521 addq(result_end, result);
2522 movq(kScratchRegister, new_space_allocation_limit);
2523 cmpq(result_end, Operand(kScratchRegister, 0));
2524 j(above, gc_required);
2525
2526 // Update allocation top.
2527 UpdateAllocationTopHelper(result_end, scratch);
2528
2529 // Tag the result if requested.
2530 if ((flags & TAG_OBJECT) != 0) {
2531 addq(result, Immediate(kHeapObjectTag));
2532 }
2533}
2534
2535
2536void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2537 ExternalReference new_space_allocation_top =
2538 ExternalReference::new_space_allocation_top_address();
2539
2540 // Make sure the object has no tag before resetting top.
2541 and_(object, Immediate(~kHeapObjectTagMask));
2542 movq(kScratchRegister, new_space_allocation_top);
2543#ifdef DEBUG
2544 cmpq(object, Operand(kScratchRegister, 0));
2545 Check(below, "Undo allocation of non allocated memory");
2546#endif
2547 movq(Operand(kScratchRegister, 0), object);
2548}
2549
2550
Steve Block3ce2e202009-11-05 08:53:23 +00002551void MacroAssembler::AllocateHeapNumber(Register result,
2552 Register scratch,
2553 Label* gc_required) {
2554 // Allocate heap number in new space.
2555 AllocateInNewSpace(HeapNumber::kSize,
2556 result,
2557 scratch,
2558 no_reg,
2559 gc_required,
2560 TAG_OBJECT);
2561
2562 // Set the map.
2563 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2564 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2565}
2566
2567
Leon Clarkee46be812010-01-19 14:06:41 +00002568void MacroAssembler::AllocateTwoByteString(Register result,
2569 Register length,
2570 Register scratch1,
2571 Register scratch2,
2572 Register scratch3,
2573 Label* gc_required) {
2574 // Calculate the number of bytes needed for the characters in the string while
2575 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002576 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
2577 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002578 ASSERT(kShortSize == 2);
2579 // scratch1 = length * 2 + kObjectAlignmentMask.
Steve Block6ded16b2010-05-10 14:33:55 +01002580 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
2581 kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002582 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002583 if (kHeaderAlignment > 0) {
2584 subq(scratch1, Immediate(kHeaderAlignment));
2585 }
Leon Clarkee46be812010-01-19 14:06:41 +00002586
2587 // Allocate two byte string in new space.
2588 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2589 times_1,
2590 scratch1,
2591 result,
2592 scratch2,
2593 scratch3,
2594 gc_required,
2595 TAG_OBJECT);
2596
2597 // Set the map, length and hash field.
2598 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2599 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002600 Integer32ToSmi(scratch1, length);
2601 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Leon Clarkee46be812010-01-19 14:06:41 +00002602 movl(FieldOperand(result, String::kHashFieldOffset),
2603 Immediate(String::kEmptyHashField));
2604}
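
// Worked example of the size computation above (a sketch assuming 8-byte
// object alignment, i.e. kObjectAlignmentMask == 7, and an already aligned
// header so that kHeaderAlignment == 0): for length == 3 the character data
// needs 3 * 2 = 6 bytes, scratch1 becomes (6 + 7) & ~7 = 8, and the
// allocation size is SeqTwoByteString::kHeaderSize + 8.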
2605
2606
2607void MacroAssembler::AllocateAsciiString(Register result,
2608 Register length,
2609 Register scratch1,
2610 Register scratch2,
2611 Register scratch3,
2612 Label* gc_required) {
2613 // Calculate the number of bytes needed for the characters in the string while
2614 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01002615 const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
2616 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00002617 movl(scratch1, length);
2618 ASSERT(kCharSize == 1);
Steve Block6ded16b2010-05-10 14:33:55 +01002619 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
Leon Clarkee46be812010-01-19 14:06:41 +00002620 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01002621 if (kHeaderAlignment > 0) {
2622 subq(scratch1, Immediate(kHeaderAlignment));
2623 }
Leon Clarkee46be812010-01-19 14:06:41 +00002624
2625 // Allocate ascii string in new space.
2626 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2627 times_1,
2628 scratch1,
2629 result,
2630 scratch2,
2631 scratch3,
2632 gc_required,
2633 TAG_OBJECT);
2634
2635 // Set the map, length and hash field.
2636 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2637 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01002638 Integer32ToSmi(scratch1, length);
2639 movq(FieldOperand(result, String::kLengthOffset), scratch1);
Leon Clarkee46be812010-01-19 14:06:41 +00002640 movl(FieldOperand(result, String::kHashFieldOffset),
2641 Immediate(String::kEmptyHashField));
2642}
2643
2644
2645void MacroAssembler::AllocateConsString(Register result,
2646 Register scratch1,
2647 Register scratch2,
2648 Label* gc_required) {
  // Allocate a cons string object in new space.
2650 AllocateInNewSpace(ConsString::kSize,
2651 result,
2652 scratch1,
2653 scratch2,
2654 gc_required,
2655 TAG_OBJECT);
2656
2657 // Set the map. The other fields are left uninitialized.
2658 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2659 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2660}
2661
2662
2663void MacroAssembler::AllocateAsciiConsString(Register result,
2664 Register scratch1,
2665 Register scratch2,
2666 Label* gc_required) {
  // Allocate an ascii cons string object in new space.
2668 AllocateInNewSpace(ConsString::kSize,
2669 result,
2670 scratch1,
2671 scratch2,
2672 gc_required,
2673 TAG_OBJECT);
2674
2675 // Set the map. The other fields are left uninitialized.
2676 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2677 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2678}
2679
2680
Steve Blockd0582a62009-12-15 09:54:21 +00002681void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2682 if (context_chain_length > 0) {
2683 // Move up the chain of contexts to the context containing the slot.
2684 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2685 // Load the function context (which is the incoming, outer context).
Leon Clarkee46be812010-01-19 14:06:41 +00002686 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00002687 for (int i = 1; i < context_chain_length; i++) {
2688 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2689 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2690 }
2691 // The context may be an intermediate context, not a function context.
2692 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2693 } else { // context is the current function context.
2694 // The context may be an intermediate context, not a function context.
2695 movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2696 }
2697}
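
// Illustrative walk (a sketch): with context_chain_length == 2 the code
// above loads the CLOSURE_INDEX slot and the closure's kContextOffset twice,
// then dereferences FCONTEXT_INDEX so that dst ends up holding a function
// context rather than a possible intermediate context.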
2698
Leon Clarke4515c472010-02-03 11:58:03 +00002699int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2700 // On Windows stack slots are reserved by the caller for all arguments
2701 // including the ones passed in registers. On Linux 6 arguments are passed in
2702 // registers and the caller does not reserve stack slots for them.
2703 ASSERT(num_arguments >= 0);
2704#ifdef _WIN64
2705 static const int kArgumentsWithoutStackSlot = 0;
2706#else
2707 static const int kArgumentsWithoutStackSlot = 6;
2708#endif
2709 return num_arguments > kArgumentsWithoutStackSlot ?
2710 num_arguments - kArgumentsWithoutStackSlot : 0;
2711}
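
// Worked examples for the slot computation above (a sketch): with
// num_arguments == 2, Windows reserves 2 stack slots (every argument gets a
// backing slot) while Linux reserves 0; with num_arguments == 8, Windows
// reserves 8 and Linux reserves 8 - 6 = 2.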
2712
2713void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2714 int frame_alignment = OS::ActivationFrameAlignment();
2715 ASSERT(frame_alignment != 0);
2716 ASSERT(num_arguments >= 0);
2717 // Make stack end at alignment and allocate space for arguments and old rsp.
2718 movq(kScratchRegister, rsp);
2719 ASSERT(IsPowerOf2(frame_alignment));
2720 int argument_slots_on_stack =
2721 ArgumentStackSlotsForCFunctionCall(num_arguments);
2722 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2723 and_(rsp, Immediate(-frame_alignment));
2724 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2725}
2726
2727
2728void MacroAssembler::CallCFunction(ExternalReference function,
2729 int num_arguments) {
2730 movq(rax, function);
2731 CallCFunction(rax, num_arguments);
2732}
2733
2734
2735void MacroAssembler::CallCFunction(Register function, int num_arguments) {
Steve Block6ded16b2010-05-10 14:33:55 +01002736 // Check stack alignment.
2737 if (FLAG_debug_code) {
2738 CheckStackAlignment();
2739 }
2740
Leon Clarke4515c472010-02-03 11:58:03 +00002741 call(function);
2742 ASSERT(OS::ActivationFrameAlignment() != 0);
2743 ASSERT(num_arguments >= 0);
2744 int argument_slots_on_stack =
2745 ArgumentStackSlotsForCFunctionCall(num_arguments);
2746 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2747}
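
// Hypothetical calling sequence (a sketch; the target and the argument setup
// are illustrative, not taken from this file):
//   ExternalReference target = ...;  // some C function known to the VM
//   masm->PrepareCallCFunction(2);
//   // ... place the two arguments in the ABI argument registers ...
//   masm->CallCFunction(target, 2);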
2748
Steve Blockd0582a62009-12-15 09:54:21 +00002749
Steve Blocka7e24c12009-10-30 11:49:00 +00002750CodePatcher::CodePatcher(byte* address, int size)
2751 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2752 // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
2754 // bytes of instructions without failing with buffer size constraints.
2755 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2756}
2757
2758
2759CodePatcher::~CodePatcher() {
2760 // Indicate that code has changed.
2761 CPU::FlushICache(address_, size_);
2762
2763 // Check that the code was patched as expected.
2764 ASSERT(masm_.pc_ == address_ + size_);
2765 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2766}
2767
Steve Blocka7e24c12009-10-30 11:49:00 +00002768} } // namespace v8::internal