blob: cd6b90ce956d9c46de5d407655bfb07b292b5b06 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_X64
Leon Clarkef7060e22010-06-03 12:02:55 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/base/bits.h"
8#include "src/base/division-by-constant.h"
9#include "src/bootstrapper.h"
10#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011#include "src/debug/debug.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000012#include "src/heap/heap.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013#include "src/register-configuration.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000014#include "src/x64/assembler-x64.h"
15#include "src/x64/macro-assembler-x64.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000016
17namespace v8 {
18namespace internal {
19
// Constructs a MacroAssembler emitting into |buffer| of |size| bytes.
// The root array is assumed reachable through kRootRegister (set below);
// generating_stub_/has_frame_ start false and are toggled by code
// generators.  When |create_code_object| is kYes, code_object_ is seeded
// with the undefined value as a placeholder handle.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false),
      root_array_available_(true) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
31
32
// Sentinel meaning "no usable kRootRegister-relative displacement".
static const int64_t kInvalidRootRegisterDelta = -1;


// Returns the displacement of |other| relative to the value held in
// kRootRegister (the biased roots-array start), or
// kInvalidRootRegisterDelta when the delta must not be used: under
// predictable_code_size(), only references into the Isolate object itself
// are allowed, since anything else could change the delta and thus the
// instruction encoding size.
int64_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
  if (predictable_code_size() &&
      (other.address() < reinterpret_cast<Address>(isolate()) ||
       other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
    return kInvalidRootRegisterDelta;
  }
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate()->heap()->roots_array_start());

  int64_t delta = kInvalidRootRegisterDelta;  // Bogus initialization.
  if (kPointerSize == kInt64Size) {
    delta = other.address() - roots_register_value;
  } else {
    // For x32, zero extend the address to 64-bit and calculate the delta.
    uint64_t o = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(other.address()));
    uint64_t r = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(roots_register_value));
    delta = o - r;
  }
  return delta;
}
58
59
// Returns an Operand addressing |target|.  When the target is within
// int32 range of the root register (and we are not serializing, where
// absolute addresses must be relocatable), a kRootRegister-relative
// operand is returned and |scratch| is untouched; otherwise the address
// is materialized into |scratch|.
Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(target);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  Move(scratch, target);
  return Operand(scratch, 0);
}
71
72
// Loads the pointer-sized value stored at external reference |source|
// into |destination|.  Prefers a root-register-relative load; otherwise
// falls back to materializing the address.  rax gets the dedicated
// load_rax path (presumably the shorter moffs encoding — the special
// case exists for rax only); other registers go through kScratchRegister.
void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  } else {
    Move(kScratchRegister, source);
    movp(destination, Operand(kScratchRegister, 0));
  }
}
89
90
// Stores |source| to the memory at external reference |destination|.
// Mirror image of Load(): root-register-relative store when possible,
// store_rax special case for rax, kScratchRegister fallback otherwise.
void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(destination);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  } else {
    Move(kScratchRegister, destination);
    movp(Operand(kScratchRegister, 0), source);
  }
}
107
108
// Loads the address of |source| (not its contents) into |destination|,
// using leap off kRootRegister when the delta fits in int32, else a full
// 64-bit move.  NOTE: LoadAddressSize() below hard-codes the byte sizes
// of exactly this instruction selection — keep the two in sync.
void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  Move(destination, source);
}
121
122
// Returns the number of bytes LoadAddress(reg, source) would emit.
// Used by callers that need to know code size before emitting.
int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      // Operand is leap(scratch, Operand(kRootRegister, delta));
      // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movp(destination, src);
  return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
}
142
143
// Pushes the address of |source| onto the stack.  A 32-bit-represantable
// address is pushed as an immediate (not when serializing — immediates
// are not relocatable); otherwise it is loaded via kScratchRegister.
void MacroAssembler::PushAddress(ExternalReference source) {
  int64_t address = reinterpret_cast<int64_t>(source.address());
  if (is_int32(address) && !serializer_enabled()) {
    if (emit_debug_code()) {
      // Zap the scratch register so debug code cannot rely on it holding
      // the address, matching the non-immediate path's clobber.
      Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone());
    }
    Push(Immediate(static_cast<int32_t>(address)));
    return;
  }
  LoadAddress(kScratchRegister, source);
  Push(kScratchRegister);
}
156
157
// Loads the root-list entry |index| into |destination|.  kRootRegister
// points kRootRegisterBias bytes past the roots array start, so the bias
// is subtracted from the scaled index.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  movp(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}
163
164
// Loads roots[fixed_offset + variable_offset] into |destination|, where
// |variable_offset| is an unscaled element count held in a register.
void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  DCHECK(root_array_available_);
  movp(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}
174
175
// Stores |source| into root-list slot |index|.  Only roots that are
// legally mutable after heap initialization may be written.
void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  DCHECK(root_array_available_);
  movp(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}
182
183
// Pushes the root-list entry |index| onto the stack without using a
// scratch register (memory-operand push).
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  Push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}
188
189
// Compares register |with| against root-list entry |index|; sets flags
// for a following conditional jump.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  cmpp(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}
195
196
// Compares memory operand |with| against root-list entry |index|.
// cmpp cannot take two memory operands, so the root value is staged in
// kScratchRegister — hence |with| must not use that register.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  DCHECK(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpp(with, kScratchRegister);
}
204
205
// Records slot address |addr| in the store buffer (remembered set) and,
// when the buffer fills up, calls StoreBufferOverflowStub.  |and_then|
// selects the epilogue: kReturnAtEnd emits ret(0) on both paths,
// kFallThroughAtEnd falls through.  Clobbers |scratch|.
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register addr,
                                         Register scratch,
                                         SaveFPRegsMode save_fp,
                                         RememberedSetFinalAction and_then) {
  if (emit_debug_code()) {
    // The remembered set is only for old->new pointers, so the object
    // must be in new space.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  movp(scratch, ExternalOperand(store_buffer));
  // Store pointer to buffer.
  movp(Operand(scratch, 0), addr);
  // Increment buffer top.
  addp(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  movp(ExternalOperand(store_buffer), scratch);
  // Call stub on end of buffer.
  Label done;
  // Check for end of buffer.
  testp(scratch, Immediate(StoreBuffer::kStoreBufferMask));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(not_equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
249
250
// Jumps to |branch| depending on whether |object| is in new space.
// Tests the page header flags for from-space/to-space membership; the
// |cc| condition (zero / not_zero) selects "not in" vs "in" new space.
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance distance) {
  const int mask =
      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
  CheckPageFlag(object, scratch, mask, cc, branch, distance);
}
260
261
// Write barrier for a store of |value| into the field at |offset| within
// |object|.  Computes the slot address into |dst| and delegates to
// RecordWrite.  The smi check (if not done by the caller) is performed
// here, so RecordWrite is called with OMIT_SMI_CHECK.  Clobbers |dst|;
// in debug code |value| and |dst| are zapped afterwards.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the start
  // of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  leap(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify the computed slot address is pointer-aligned.
    Label ok;
    testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(dst, kZapValue, Assembler::RelocInfoNone());
  }
}
305
306
// Write barrier for a store of |value| into element |index| (an untagged
// integer, not a smi) of the FixedArray |object|.  Reuses |index| as the
// slot-address register and delegates to RecordWrite with the smi check
// already performed.  Clobbers |index|; in debug code |value| and
// |index| are zapped afterwards.
void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Array access: calculate the destination address. Index is not a smi.
  Register dst = index;
  leap(dst, Operand(object, index, times_pointer_size,
                    FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(index, kZapValue, Assembler::RelocInfoNone());
  }
}
341
342
// Write barrier specialized for storing |map| into |object|'s map slot.
// Only needed for incremental marking (maps never live in new space, so
// no remembered-set update is required).  |dst| receives the slot
// address.  Clobbers |dst| and possibly |map|; debug code zaps both.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       SaveFPRegsMode fp_mode) {
  DCHECK(!object.is(kScratchRegister));
  DCHECK(!object.is(map));
  DCHECK(!object.is(dst));
  DCHECK(!map.is(dst));
  AssertNotSmi(object);

  if (emit_debug_code()) {
    // Verify |map| really is a map (its own map is the meta map).
    // CompareMap may clobber kScratchRegister, so preserve |map| if it
    // aliases it.
    Label ok;
    if (map.is(kScratchRegister)) pushq(map);
    CompareMap(map, isolate()->factory()->meta_map());
    if (map.is(kScratchRegister)) popq(map);
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Verify the store already happened: object's map slot holds |map|.
    Label ok;
    if (map.is(kScratchRegister)) pushq(map);
    cmpp(map, FieldOperand(object, HeapObject::kMapOffset));
    if (map.is(kScratchRegister)) popq(map);
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // Compute the address.
  leap(dst, FieldOperand(object, HeapObject::kMapOffset));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(dst, kZapValue, Assembler::RelocInfoNone());
    Move(map, kZapValue, Assembler::RelocInfoNone());
  }
}
412
413
// General write barrier: |value| was stored into the slot whose address
// is in |address| within |object|.  Skips all work when the barrier is
// provably unnecessary (smi value, uninteresting pages, incremental
// marking off with OMIT_REMEMBERED_SET); otherwise calls RecordWriteStub.
// Clobbers |address| and |value| (debug code zaps them explicitly).
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Verify the caller's claim: the slot at |address| holds |value|.
    Label ok;
    cmpp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(address, kZapValue, Assembler::RelocInfoNone());
    Move(value, kZapValue, Assembler::RelocInfoNone());
  }
}
482
// Write barrier for storing |code_entry| into the code-entry field of
// |js_function|.  Register assignment is fixed (rdi/rcx/r15) so the
// registers line up with the C calling conventions used below.  Calls
// the C incremental-marking record-write function rather than a stub;
// all caller-saved registers are preserved around the call.
void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // The input registers are fixed to make calling the C write barrier function
  // easier.
  DCHECK(js_function.is(rdi));
  DCHECK(code_entry.is(rcx));
  DCHECK(scratch.is(r15));

  // Since a code entry (value) is always in old space, we don't need to update
  // remembered set. If incremental marking is off, there is nothing for us to
  // do.
  if (!FLAG_incremental_marking) return;

  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    // Verify the store already happened: the field holds |code_entry|.
    Label ok;
    leap(scratch, FieldOperand(js_function, offset));
    cmpp(code_entry, Operand(scratch, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
                Label::kNear);

  // Save input registers.
  Push(js_function);
  Push(code_entry);

  const Register dst = scratch;
  leap(dst, FieldOperand(js_function, offset));

  // Save caller-saved registers.
  PushCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  int argument_count = 3;
  PrepareCallCFunction(argument_count);

  // Load the argument registers.
  if (arg_reg_1.is(rcx)) {
    // Windows calling convention.
    DCHECK(arg_reg_2.is(rdx) && arg_reg_3.is(r8));

    movp(arg_reg_1, js_function);  // rcx gets rdi.
    movp(arg_reg_2, dst);          // rdx gets r15.
  } else {
    // AMD64 calling convention.
    DCHECK(arg_reg_1.is(rdi) && arg_reg_2.is(rsi) && arg_reg_3.is(rdx));

    // rdi is already loaded with js_function.
    movp(arg_reg_2, dst);  // rsi gets r15.
  }
  Move(arg_reg_3, ExternalReference::isolate_address(isolate()));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  PopCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  // Restore input registers.
  Pop(code_entry);
  Pop(js_function);

  bind(&done);
}
Steve Block8defd9f2010-07-08 12:39:36 +0100567
// Emits a Check (abort unless |cc| holds) only in debug-code builds;
// emits nothing otherwise.
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}
571
572
// Debug-code check that |elements| is a fast-elements backing store:
// its map must be FixedArray, FixedDoubleArray, or FixedCOWArray.
// Aborts otherwise.  No code is emitted outside debug-code builds.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
589
590
// Emits code that aborts with |reason| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(reason);
  // Control will not return here.
  bind(&L);
}
598
599
// Emits a runtime check that rsp is aligned to the platform's activation
// frame alignment; hits int3 if not.  Emits nothing when the required
// alignment is no stricter than natural stack slot alignment.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    testp(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
613
614
// Jumps to |then_label| when |result| is zero and |op| is negative —
// i.e. when an integer operation produced what would be -0 as a double.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok, Label::kNear);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}
625
626
// Emits code that aborts execution with |reason|: pushes the reason as a
// smi and calls Runtime::kAbort.  In DEBUG builds the reason text is
// recorded as an assembler comment, and FLAG_trap_on_abort turns the
// whole thing into a bare int3 trap.  Never returns.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  Move(kScratchRegister, Smi::FromInt(static_cast<int>(reason)),
       Assembler::RelocInfoNone());
  Push(kScratchRegister);

  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // Control will not return here.
  int3();
}
656
657
// Emits a call to |stub|'s generated code with CODE_TARGET relocation.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
662
663
// Emits a tail call (jump) to |stub|'s generated code.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
667
668
// Emits a stub return popping |argc| - 1 arguments (the return address
// itself accounts for one slot).  Only valid while generating a stub.
void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
673
674
// Stub calls require a frame unless the stub never sets one up itself.
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}
678
679
// Extracts the cached array index from string-hash-field |hash| into
// |index| as a smi.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!hash.is(index)) {
    movl(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}
691
692
// Emits a call to runtime function |f| with |num_arguments| already on
// the stack: argument count goes in rax, the function's entry address in
// rbx, and CEntryStub performs the actual transition to C++.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(isolate(), f->result_size, save_doubles);
  CallStub(&ces);
}
710
711
// Emits a call to the external function at |ext| through CEntryStub,
// with |num_arguments| in rax and the target address in rbx.
void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
720
721
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000722void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000723 // ----------- S t a t e -------------
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000724 // -- rsp[0] : return address
725 // -- rsp[8] : argument num_arguments - 1
Steve Blocka7e24c12009-10-30 11:49:00 +0000726 // ...
727 // -- rsp[8 * num_arguments] : argument 0 (receiver)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000728 //
729 // For runtime functions with variable arguments:
730 // -- rax : number of arguments
Steve Blocka7e24c12009-10-30 11:49:00 +0000731 // -----------------------------------
732
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000733 const Runtime::Function* function = Runtime::FunctionForId(fid);
734 DCHECK_EQ(1, function->result_size);
735 if (function->nargs >= 0) {
736 Set(rax, function->nargs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000737 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000738 JumpToExternalReference(ExternalReference(fid, isolate()));
Ben Murdochbb769b22010-08-11 14:56:33 +0100739}
740
741
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000742void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000743 // Set the entry point and jump to the C entry runtime stub.
Steve Block44f0eee2011-05-26 01:26:41 +0100744 LoadAddress(rbx, ext);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000745 CEntryStub ces(isolate(), 1);
Steve Block3ce2e202009-11-05 08:53:23 +0000746 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
Steve Blocka7e24c12009-10-30 11:49:00 +0000747}
748
749
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000750#define REG(Name) \
751 { Register::kCode_##Name }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100752
753static const Register saved_regs[] = {
754 REG(rax), REG(rcx), REG(rdx), REG(rbx), REG(rbp), REG(rsi), REG(rdi), REG(r8),
755 REG(r9), REG(r10), REG(r11)
756};
757
758#undef REG
759
760static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
761
762
763void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
764 Register exclusion1,
765 Register exclusion2,
766 Register exclusion3) {
767 // We don't allow a GC during a store buffer overflow so there is no need to
768 // store the registers in any particular way, but we do have to store and
769 // restore them.
770 for (int i = 0; i < kNumberOfSavedRegs; i++) {
771 Register reg = saved_regs[i];
772 if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000773 pushq(reg);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100774 }
775 }
776 // R12 to r15 are callee save on all platforms.
777 if (fp_mode == kSaveFPRegs) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000778 subp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
779 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100780 XMMRegister reg = XMMRegister::from_code(i);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000781 Movsd(Operand(rsp, i * kDoubleSize), reg);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100782 }
783 }
784}
785
786
787void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
788 Register exclusion1,
789 Register exclusion2,
790 Register exclusion3) {
791 if (fp_mode == kSaveFPRegs) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000792 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100793 XMMRegister reg = XMMRegister::from_code(i);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000794 Movsd(reg, Operand(rsp, i * kDoubleSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100795 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000796 addp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100797 }
798 for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
799 Register reg = saved_regs[i];
800 if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000801 popq(reg);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100802 }
803 }
804}
805
806
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000807void MacroAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
808 if (CpuFeatures::IsSupported(AVX)) {
809 CpuFeatureScope scope(this, AVX);
810 vcvtss2sd(dst, src, src);
811 } else {
812 cvtss2sd(dst, src);
813 }
814}
815
816
817void MacroAssembler::Cvtss2sd(XMMRegister dst, const Operand& src) {
818 if (CpuFeatures::IsSupported(AVX)) {
819 CpuFeatureScope scope(this, AVX);
820 vcvtss2sd(dst, dst, src);
821 } else {
822 cvtss2sd(dst, src);
823 }
824}
825
826
827void MacroAssembler::Cvtsd2ss(XMMRegister dst, XMMRegister src) {
828 if (CpuFeatures::IsSupported(AVX)) {
829 CpuFeatureScope scope(this, AVX);
830 vcvtsd2ss(dst, src, src);
831 } else {
832 cvtsd2ss(dst, src);
833 }
834}
835
836
837void MacroAssembler::Cvtsd2ss(XMMRegister dst, const Operand& src) {
838 if (CpuFeatures::IsSupported(AVX)) {
839 CpuFeatureScope scope(this, AVX);
840 vcvtsd2ss(dst, dst, src);
841 } else {
842 cvtsd2ss(dst, src);
843 }
844}
845
846
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000847void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000848 if (CpuFeatures::IsSupported(AVX)) {
849 CpuFeatureScope scope(this, AVX);
850 vxorpd(dst, dst, dst);
851 vcvtlsi2sd(dst, dst, src);
852 } else {
853 xorpd(dst, dst);
854 cvtlsi2sd(dst, src);
855 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000856}
857
858
859void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000860 if (CpuFeatures::IsSupported(AVX)) {
861 CpuFeatureScope scope(this, AVX);
862 vxorpd(dst, dst, dst);
863 vcvtlsi2sd(dst, dst, src);
864 } else {
865 xorpd(dst, dst);
866 cvtlsi2sd(dst, src);
867 }
868}
869
870
Ben Murdoch097c5b22016-05-18 11:27:45 +0100871void MacroAssembler::Cvtlsi2ss(XMMRegister dst, Register src) {
872 if (CpuFeatures::IsSupported(AVX)) {
873 CpuFeatureScope scope(this, AVX);
874 vxorps(dst, dst, dst);
875 vcvtlsi2ss(dst, dst, src);
876 } else {
877 xorps(dst, dst);
878 cvtlsi2ss(dst, src);
879 }
880}
881
882
883void MacroAssembler::Cvtlsi2ss(XMMRegister dst, const Operand& src) {
884 if (CpuFeatures::IsSupported(AVX)) {
885 CpuFeatureScope scope(this, AVX);
886 vxorps(dst, dst, dst);
887 vcvtlsi2ss(dst, dst, src);
888 } else {
889 xorps(dst, dst);
890 cvtlsi2ss(dst, src);
891 }
892}
893
894
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000895void MacroAssembler::Cvtqsi2ss(XMMRegister dst, Register src) {
896 if (CpuFeatures::IsSupported(AVX)) {
897 CpuFeatureScope scope(this, AVX);
898 vxorps(dst, dst, dst);
899 vcvtqsi2ss(dst, dst, src);
900 } else {
901 xorps(dst, dst);
902 cvtqsi2ss(dst, src);
903 }
904}
905
906
907void MacroAssembler::Cvtqsi2ss(XMMRegister dst, const Operand& src) {
908 if (CpuFeatures::IsSupported(AVX)) {
909 CpuFeatureScope scope(this, AVX);
910 vxorps(dst, dst, dst);
911 vcvtqsi2ss(dst, dst, src);
912 } else {
913 xorps(dst, dst);
914 cvtqsi2ss(dst, src);
915 }
916}
917
918
919void MacroAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
920 if (CpuFeatures::IsSupported(AVX)) {
921 CpuFeatureScope scope(this, AVX);
922 vxorpd(dst, dst, dst);
923 vcvtqsi2sd(dst, dst, src);
924 } else {
925 xorpd(dst, dst);
926 cvtqsi2sd(dst, src);
927 }
928}
929
930
931void MacroAssembler::Cvtqsi2sd(XMMRegister dst, const Operand& src) {
932 if (CpuFeatures::IsSupported(AVX)) {
933 CpuFeatureScope scope(this, AVX);
934 vxorpd(dst, dst, dst);
935 vcvtqsi2sd(dst, dst, src);
936 } else {
937 xorpd(dst, dst);
938 cvtqsi2sd(dst, src);
939 }
940}
941
942
943void MacroAssembler::Cvtqui2ss(XMMRegister dst, Register src, Register tmp) {
944 Label msb_set_src;
945 Label jmp_return;
946 testq(src, src);
947 j(sign, &msb_set_src, Label::kNear);
948 Cvtqsi2ss(dst, src);
949 jmp(&jmp_return, Label::kNear);
950 bind(&msb_set_src);
951 movq(tmp, src);
952 shrq(src, Immediate(1));
953 // Recover the least significant bit to avoid rounding errors.
954 andq(tmp, Immediate(1));
955 orq(src, tmp);
956 Cvtqsi2ss(dst, src);
957 addss(dst, dst);
958 bind(&jmp_return);
959}
960
961
962void MacroAssembler::Cvtqui2sd(XMMRegister dst, Register src, Register tmp) {
963 Label msb_set_src;
964 Label jmp_return;
965 testq(src, src);
966 j(sign, &msb_set_src, Label::kNear);
967 Cvtqsi2sd(dst, src);
968 jmp(&jmp_return, Label::kNear);
969 bind(&msb_set_src);
970 movq(tmp, src);
971 shrq(src, Immediate(1));
972 andq(tmp, Immediate(1));
973 orq(src, tmp);
974 Cvtqsi2sd(dst, src);
975 addsd(dst, dst);
976 bind(&jmp_return);
977}
978
979
980void MacroAssembler::Cvtsd2si(Register dst, XMMRegister src) {
981 if (CpuFeatures::IsSupported(AVX)) {
982 CpuFeatureScope scope(this, AVX);
983 vcvtsd2si(dst, src);
984 } else {
985 cvtsd2si(dst, src);
986 }
987}
988
989
Ben Murdoch097c5b22016-05-18 11:27:45 +0100990void MacroAssembler::Cvttss2si(Register dst, XMMRegister src) {
991 if (CpuFeatures::IsSupported(AVX)) {
992 CpuFeatureScope scope(this, AVX);
993 vcvttss2si(dst, src);
994 } else {
995 cvttss2si(dst, src);
996 }
997}
998
999
1000void MacroAssembler::Cvttss2si(Register dst, const Operand& src) {
1001 if (CpuFeatures::IsSupported(AVX)) {
1002 CpuFeatureScope scope(this, AVX);
1003 vcvttss2si(dst, src);
1004 } else {
1005 cvttss2si(dst, src);
1006 }
1007}
1008
1009
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001010void MacroAssembler::Cvttsd2si(Register dst, XMMRegister src) {
1011 if (CpuFeatures::IsSupported(AVX)) {
1012 CpuFeatureScope scope(this, AVX);
1013 vcvttsd2si(dst, src);
1014 } else {
1015 cvttsd2si(dst, src);
1016 }
1017}
1018
1019
1020void MacroAssembler::Cvttsd2si(Register dst, const Operand& src) {
1021 if (CpuFeatures::IsSupported(AVX)) {
1022 CpuFeatureScope scope(this, AVX);
1023 vcvttsd2si(dst, src);
1024 } else {
1025 cvttsd2si(dst, src);
1026 }
1027}
1028
1029
1030void MacroAssembler::Cvttss2siq(Register dst, XMMRegister src) {
1031 if (CpuFeatures::IsSupported(AVX)) {
1032 CpuFeatureScope scope(this, AVX);
1033 vcvttss2siq(dst, src);
1034 } else {
1035 cvttss2siq(dst, src);
1036 }
1037}
1038
1039
1040void MacroAssembler::Cvttss2siq(Register dst, const Operand& src) {
1041 if (CpuFeatures::IsSupported(AVX)) {
1042 CpuFeatureScope scope(this, AVX);
1043 vcvttss2siq(dst, src);
1044 } else {
1045 cvttss2siq(dst, src);
1046 }
1047}
1048
1049
1050void MacroAssembler::Cvttsd2siq(Register dst, XMMRegister src) {
1051 if (CpuFeatures::IsSupported(AVX)) {
1052 CpuFeatureScope scope(this, AVX);
1053 vcvttsd2siq(dst, src);
1054 } else {
1055 cvttsd2siq(dst, src);
1056 }
1057}
1058
1059
1060void MacroAssembler::Cvttsd2siq(Register dst, const Operand& src) {
1061 if (CpuFeatures::IsSupported(AVX)) {
1062 CpuFeatureScope scope(this, AVX);
1063 vcvttsd2siq(dst, src);
1064 } else {
1065 cvttsd2siq(dst, src);
1066 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001067}
1068
1069
1070void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
1071 DCHECK(!r.IsDouble());
1072 if (r.IsInteger8()) {
1073 movsxbq(dst, src);
1074 } else if (r.IsUInteger8()) {
1075 movzxbl(dst, src);
1076 } else if (r.IsInteger16()) {
1077 movsxwq(dst, src);
1078 } else if (r.IsUInteger16()) {
1079 movzxwl(dst, src);
1080 } else if (r.IsInteger32()) {
1081 movl(dst, src);
1082 } else {
1083 movp(dst, src);
1084 }
1085}
1086
1087
1088void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
1089 DCHECK(!r.IsDouble());
1090 if (r.IsInteger8() || r.IsUInteger8()) {
1091 movb(dst, src);
1092 } else if (r.IsInteger16() || r.IsUInteger16()) {
1093 movw(dst, src);
1094 } else if (r.IsInteger32()) {
1095 movl(dst, src);
1096 } else {
1097 if (r.IsHeapObject()) {
1098 AssertNotSmi(src);
1099 } else if (r.IsSmi()) {
1100 AssertSmi(src);
1101 }
1102 movp(dst, src);
1103 }
1104}
1105
1106
Steve Blocka7e24c12009-10-30 11:49:00 +00001107void MacroAssembler::Set(Register dst, int64_t x) {
1108 if (x == 0) {
Steve Block8defd9f2010-07-08 12:39:36 +01001109 xorl(dst, dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001110 } else if (is_uint32(x)) {
Steve Blockd0582a62009-12-15 09:54:21 +00001111 movl(dst, Immediate(static_cast<uint32_t>(x)));
Ben Murdoch8b112d22011-06-08 16:22:53 +01001112 } else if (is_int32(x)) {
1113 movq(dst, Immediate(static_cast<int32_t>(x)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001114 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001115 movq(dst, x);
Steve Blocka7e24c12009-10-30 11:49:00 +00001116 }
1117}
1118
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001119void MacroAssembler::Set(const Operand& dst, intptr_t x) {
1120 if (kPointerSize == kInt64Size) {
1121 if (is_int32(x)) {
1122 movp(dst, Immediate(static_cast<int32_t>(x)));
1123 } else {
1124 Set(kScratchRegister, x);
1125 movp(dst, kScratchRegister);
1126 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001127 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001128 movp(dst, Immediate(static_cast<int32_t>(x)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001129 }
1130}
1131
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001132
Steve Blocka7e24c12009-10-30 11:49:00 +00001133// ----------------------------------------------------------------------------
1134// Smi tagging, untagging and tag detection.
1135
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001136bool MacroAssembler::IsUnsafeInt(const int32_t x) {
1137 static const int kMaxBits = 17;
1138 return !is_intn(x, kMaxBits);
1139}
1140
1141
1142void MacroAssembler::SafeMove(Register dst, Smi* src) {
1143 DCHECK(!dst.is(kScratchRegister));
1144 if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
1145 if (SmiValuesAre32Bits()) {
1146 // JIT cookie can be converted to Smi.
1147 Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
1148 Move(kScratchRegister, Smi::FromInt(jit_cookie()));
1149 xorp(dst, kScratchRegister);
1150 } else {
1151 DCHECK(SmiValuesAre31Bits());
1152 int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
1153 movp(dst, Immediate(value ^ jit_cookie()));
1154 xorp(dst, Immediate(jit_cookie()));
1155 }
1156 } else {
1157 Move(dst, src);
1158 }
1159}
1160
1161
1162void MacroAssembler::SafePush(Smi* src) {
1163 if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
1164 if (SmiValuesAre32Bits()) {
1165 // JIT cookie can be converted to Smi.
1166 Push(Smi::FromInt(src->value() ^ jit_cookie()));
1167 Move(kScratchRegister, Smi::FromInt(jit_cookie()));
1168 xorp(Operand(rsp, 0), kScratchRegister);
1169 } else {
1170 DCHECK(SmiValuesAre31Bits());
1171 int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
1172 Push(Immediate(value ^ jit_cookie()));
1173 xorp(Operand(rsp, 0), Immediate(jit_cookie()));
1174 }
1175 } else {
1176 Push(src);
1177 }
1178}
1179
1180
Steve Block8defd9f2010-07-08 12:39:36 +01001181Register MacroAssembler::GetSmiConstant(Smi* source) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001182 STATIC_ASSERT(kSmiTag == 0);
Steve Block8defd9f2010-07-08 12:39:36 +01001183 int value = source->value();
1184 if (value == 0) {
1185 xorl(kScratchRegister, kScratchRegister);
1186 return kScratchRegister;
1187 }
Steve Block8defd9f2010-07-08 12:39:36 +01001188 LoadSmiConstant(kScratchRegister, source);
1189 return kScratchRegister;
1190}
1191
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001192
Steve Block8defd9f2010-07-08 12:39:36 +01001193void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001194 STATIC_ASSERT(kSmiTag == 0);
Steve Block44f0eee2011-05-26 01:26:41 +01001195 int value = source->value();
1196 if (value == 0) {
Steve Block8defd9f2010-07-08 12:39:36 +01001197 xorl(dst, dst);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001198 } else {
1199 Move(dst, source, Assembler::RelocInfoNone());
Steve Block8defd9f2010-07-08 12:39:36 +01001200 }
1201}
1202
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001203
Steve Blocka7e24c12009-10-30 11:49:00 +00001204void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001205 STATIC_ASSERT(kSmiTag == 0);
Steve Block3ce2e202009-11-05 08:53:23 +00001206 if (!dst.is(src)) {
1207 movl(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001208 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001209 shlp(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001210}
1211
1212
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001213void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
Steve Block44f0eee2011-05-26 01:26:41 +01001214 if (emit_debug_code()) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001215 testb(dst, Immediate(0x01));
Ben Murdoch257744e2011-11-30 15:57:28 +00001216 Label ok;
1217 j(zero, &ok, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001218 Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001219 bind(&ok);
1220 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001221
1222 if (SmiValuesAre32Bits()) {
1223 DCHECK(kSmiShift % kBitsPerByte == 0);
1224 movl(Operand(dst, kSmiShift / kBitsPerByte), src);
1225 } else {
1226 DCHECK(SmiValuesAre31Bits());
1227 Integer32ToSmi(kScratchRegister, src);
1228 movp(dst, kScratchRegister);
1229 }
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001230}
1231
1232
Steve Block3ce2e202009-11-05 08:53:23 +00001233void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
1234 Register src,
1235 int constant) {
1236 if (dst.is(src)) {
Steve Block44f0eee2011-05-26 01:26:41 +01001237 addl(dst, Immediate(constant));
Steve Block3ce2e202009-11-05 08:53:23 +00001238 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01001239 leal(dst, Operand(src, constant));
Steve Block3ce2e202009-11-05 08:53:23 +00001240 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001241 shlp(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001242}
1243
1244
1245void MacroAssembler::SmiToInteger32(Register dst, Register src) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001246 STATIC_ASSERT(kSmiTag == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001247 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001248 movp(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001249 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001250
1251 if (SmiValuesAre32Bits()) {
1252 shrp(dst, Immediate(kSmiShift));
1253 } else {
1254 DCHECK(SmiValuesAre31Bits());
1255 sarl(dst, Immediate(kSmiShift));
1256 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001257}
1258
1259
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001260void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001261 if (SmiValuesAre32Bits()) {
1262 movl(dst, Operand(src, kSmiShift / kBitsPerByte));
1263 } else {
1264 DCHECK(SmiValuesAre31Bits());
1265 movl(dst, src);
1266 sarl(dst, Immediate(kSmiShift));
1267 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001268}
1269
1270
Steve Blocka7e24c12009-10-30 11:49:00 +00001271void MacroAssembler::SmiToInteger64(Register dst, Register src) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001272 STATIC_ASSERT(kSmiTag == 0);
Steve Block3ce2e202009-11-05 08:53:23 +00001273 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001274 movp(dst, src);
Steve Block3ce2e202009-11-05 08:53:23 +00001275 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001276 sarp(dst, Immediate(kSmiShift));
1277 if (kPointerSize == kInt32Size) {
1278 // Sign extend to 64-bit.
1279 movsxlq(dst, dst);
1280 }
Steve Block3ce2e202009-11-05 08:53:23 +00001281}
1282
1283
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001284void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001285 if (SmiValuesAre32Bits()) {
1286 movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
1287 } else {
1288 DCHECK(SmiValuesAre31Bits());
1289 movp(dst, src);
1290 SmiToInteger64(dst, dst);
1291 }
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001292}
1293
1294
Steve Block3ce2e202009-11-05 08:53:23 +00001295void MacroAssembler::SmiTest(Register src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001296 AssertSmi(src);
1297 testp(src, src);
Steve Block3ce2e202009-11-05 08:53:23 +00001298}
1299
1300
Steve Block44f0eee2011-05-26 01:26:41 +01001301void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001302 AssertSmi(smi1);
1303 AssertSmi(smi2);
1304 cmpp(smi1, smi2);
Steve Block3ce2e202009-11-05 08:53:23 +00001305}
1306
1307
1308void MacroAssembler::SmiCompare(Register dst, Smi* src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001309 AssertSmi(dst);
Steve Block44f0eee2011-05-26 01:26:41 +01001310 Cmp(dst, src);
1311}
1312
1313
1314void MacroAssembler::Cmp(Register dst, Smi* src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001315 DCHECK(!dst.is(kScratchRegister));
Steve Block3ce2e202009-11-05 08:53:23 +00001316 if (src->value() == 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001317 testp(dst, dst);
Steve Block3ce2e202009-11-05 08:53:23 +00001318 } else {
Iain Merrick75681382010-08-19 15:07:18 +01001319 Register constant_reg = GetSmiConstant(src);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001320 cmpp(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001321 }
1322}
1323
1324
Leon Clarkef7060e22010-06-03 12:02:55 +01001325void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001326 AssertSmi(dst);
1327 AssertSmi(src);
1328 cmpp(dst, src);
Steve Block6ded16b2010-05-10 14:33:55 +01001329}
1330
1331
Steve Block3ce2e202009-11-05 08:53:23 +00001332void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001333 AssertSmi(dst);
1334 AssertSmi(src);
1335 cmpp(dst, src);
Steve Block3ce2e202009-11-05 08:53:23 +00001336}
1337
1338
1339void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001340 AssertSmi(dst);
1341 if (SmiValuesAre32Bits()) {
1342 cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
1343 } else {
1344 DCHECK(SmiValuesAre31Bits());
1345 cmpl(dst, Immediate(src));
Steve Block44f0eee2011-05-26 01:26:41 +01001346 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001347}
1348
1349
Steve Block44f0eee2011-05-26 01:26:41 +01001350void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
1351 // The Operand cannot use the smi register.
1352 Register smi_reg = GetSmiConstant(src);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001353 DCHECK(!dst.AddressUsesRegister(smi_reg));
1354 cmpp(dst, smi_reg);
Steve Block44f0eee2011-05-26 01:26:41 +01001355}
1356
1357
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001358void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001359 if (SmiValuesAre32Bits()) {
1360 cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
1361 } else {
1362 DCHECK(SmiValuesAre31Bits());
1363 SmiToInteger32(kScratchRegister, dst);
1364 cmpl(kScratchRegister, src);
1365 }
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001366}
1367
1368
Steve Blocka7e24c12009-10-30 11:49:00 +00001369void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
1370 Register src,
1371 int power) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001372 DCHECK(power >= 0);
1373 DCHECK(power < 64);
Steve Blocka7e24c12009-10-30 11:49:00 +00001374 if (power == 0) {
1375 SmiToInteger64(dst, src);
1376 return;
1377 }
Steve Block3ce2e202009-11-05 08:53:23 +00001378 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001379 movp(dst, src);
Steve Block3ce2e202009-11-05 08:53:23 +00001380 }
1381 if (power < kSmiShift) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001382 sarp(dst, Immediate(kSmiShift - power));
Steve Block3ce2e202009-11-05 08:53:23 +00001383 } else if (power > kSmiShift) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001384 shlp(dst, Immediate(power - kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001385 }
1386}
1387
1388
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001389void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
1390 Register src,
1391 int power) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001392 DCHECK((0 <= power) && (power < 32));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001393 if (dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001394 shrp(dst, Immediate(power + kSmiShift));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001395 } else {
1396 UNIMPLEMENTED(); // Not used.
1397 }
1398}
1399
1400
Ben Murdoch257744e2011-11-30 15:57:28 +00001401void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
1402 Label* on_not_smis,
1403 Label::Distance near_jump) {
1404 if (dst.is(src1) || dst.is(src2)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001405 DCHECK(!src1.is(kScratchRegister));
1406 DCHECK(!src2.is(kScratchRegister));
1407 movp(kScratchRegister, src1);
1408 orp(kScratchRegister, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00001409 JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001410 movp(dst, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00001411 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001412 movp(dst, src1);
1413 orp(dst, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00001414 JumpIfNotSmi(dst, on_not_smis, near_jump);
1415 }
1416}
1417
1418
Steve Blocka7e24c12009-10-30 11:49:00 +00001419Condition MacroAssembler::CheckSmi(Register src) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001420 STATIC_ASSERT(kSmiTag == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001421 testb(src, Immediate(kSmiTagMask));
Steve Block3ce2e202009-11-05 08:53:23 +00001422 return zero;
Steve Blocka7e24c12009-10-30 11:49:00 +00001423}
1424
1425
Steve Block1e0659c2011-05-24 12:43:12 +01001426Condition MacroAssembler::CheckSmi(const Operand& src) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001427 STATIC_ASSERT(kSmiTag == 0);
Steve Block1e0659c2011-05-24 12:43:12 +01001428 testb(src, Immediate(kSmiTagMask));
1429 return zero;
1430}
1431
1432
Ben Murdochf87a2032010-10-22 12:50:53 +01001433Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001434 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001435 // Test that both bits of the mask 0x8000000000000001 are zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001436 movp(kScratchRegister, src);
1437 rolp(kScratchRegister, Immediate(1));
Steve Block8defd9f2010-07-08 12:39:36 +01001438 testb(kScratchRegister, Immediate(3));
Steve Blocka7e24c12009-10-30 11:49:00 +00001439 return zero;
1440}
1441
1442
Steve Blocka7e24c12009-10-30 11:49:00 +00001443Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
1444 if (first.is(second)) {
1445 return CheckSmi(first);
1446 }
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001447 STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001448 if (SmiValuesAre32Bits()) {
1449 leal(kScratchRegister, Operand(first, second, times_1, 0));
1450 testb(kScratchRegister, Immediate(0x03));
1451 } else {
1452 DCHECK(SmiValuesAre31Bits());
1453 movl(kScratchRegister, first);
1454 orl(kScratchRegister, second);
1455 testb(kScratchRegister, Immediate(kSmiTagMask));
1456 }
Steve Block3ce2e202009-11-05 08:53:23 +00001457 return zero;
Steve Blocka7e24c12009-10-30 11:49:00 +00001458}
1459
1460
Ben Murdochf87a2032010-10-22 12:50:53 +01001461Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
1462 Register second) {
Leon Clarked91b9f72010-01-27 17:25:45 +00001463 if (first.is(second)) {
Ben Murdochf87a2032010-10-22 12:50:53 +01001464 return CheckNonNegativeSmi(first);
Leon Clarked91b9f72010-01-27 17:25:45 +00001465 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001466 movp(kScratchRegister, first);
1467 orp(kScratchRegister, second);
1468 rolp(kScratchRegister, Immediate(1));
Ben Murdochf87a2032010-10-22 12:50:53 +01001469 testl(kScratchRegister, Immediate(3));
Leon Clarked91b9f72010-01-27 17:25:45 +00001470 return zero;
1471}
1472
1473
Ben Murdochbb769b22010-08-11 14:56:33 +01001474Condition MacroAssembler::CheckEitherSmi(Register first,
1475 Register second,
1476 Register scratch) {
Leon Clarkee46be812010-01-19 14:06:41 +00001477 if (first.is(second)) {
1478 return CheckSmi(first);
1479 }
Ben Murdochbb769b22010-08-11 14:56:33 +01001480 if (scratch.is(second)) {
1481 andl(scratch, first);
1482 } else {
1483 if (!scratch.is(first)) {
1484 movl(scratch, first);
1485 }
1486 andl(scratch, second);
1487 }
1488 testb(scratch, Immediate(kSmiTagMask));
Leon Clarkee46be812010-01-19 14:06:41 +00001489 return zero;
1490}
1491
1492
Steve Blocka7e24c12009-10-30 11:49:00 +00001493Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001494 if (SmiValuesAre32Bits()) {
1495 // A 32-bit integer value can always be converted to a smi.
1496 return always;
1497 } else {
1498 DCHECK(SmiValuesAre31Bits());
1499 cmpl(src, Immediate(0xc0000000));
1500 return positive;
1501 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001502}
1503
1504
Steve Block3ce2e202009-11-05 08:53:23 +00001505Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001506 if (SmiValuesAre32Bits()) {
1507 // An unsigned 32-bit integer value is valid as long as the high bit
1508 // is not set.
1509 testl(src, src);
1510 return positive;
1511 } else {
1512 DCHECK(SmiValuesAre31Bits());
1513 testl(src, Immediate(0xc0000000));
1514 return zero;
1515 }
Steve Block3ce2e202009-11-05 08:53:23 +00001516}
1517
1518
Steve Block1e0659c2011-05-24 12:43:12 +01001519void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
1520 if (dst.is(src)) {
1521 andl(dst, Immediate(kSmiTagMask));
1522 } else {
1523 movl(dst, Immediate(kSmiTagMask));
1524 andl(dst, src);
1525 }
1526}
1527
1528
1529void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
1530 if (!(src.AddressUsesRegister(dst))) {
1531 movl(dst, Immediate(kSmiTagMask));
1532 andl(dst, src);
1533 } else {
1534 movl(dst, src);
1535 andl(dst, Immediate(kSmiTagMask));
1536 }
1537}
1538
1539
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001540void MacroAssembler::JumpIfValidSmiValue(Register src,
1541 Label* on_valid,
1542 Label::Distance near_jump) {
1543 Condition is_valid = CheckInteger32ValidSmiValue(src);
1544 j(is_valid, on_valid, near_jump);
1545}
1546
1547
Ben Murdoch257744e2011-11-30 15:57:28 +00001548void MacroAssembler::JumpIfNotValidSmiValue(Register src,
1549 Label* on_invalid,
1550 Label::Distance near_jump) {
1551 Condition is_valid = CheckInteger32ValidSmiValue(src);
1552 j(NegateCondition(is_valid), on_invalid, near_jump);
1553}
1554
1555
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001556void MacroAssembler::JumpIfUIntValidSmiValue(Register src,
1557 Label* on_valid,
1558 Label::Distance near_jump) {
1559 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1560 j(is_valid, on_valid, near_jump);
1561}
1562
1563
Ben Murdoch257744e2011-11-30 15:57:28 +00001564void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1565 Label* on_invalid,
1566 Label::Distance near_jump) {
1567 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1568 j(NegateCondition(is_valid), on_invalid, near_jump);
1569}
1570
1571
1572void MacroAssembler::JumpIfSmi(Register src,
1573 Label* on_smi,
1574 Label::Distance near_jump) {
1575 Condition smi = CheckSmi(src);
1576 j(smi, on_smi, near_jump);
1577}
1578
1579
1580void MacroAssembler::JumpIfNotSmi(Register src,
1581 Label* on_not_smi,
1582 Label::Distance near_jump) {
1583 Condition smi = CheckSmi(src);
1584 j(NegateCondition(smi), on_not_smi, near_jump);
1585}
1586
1587
1588void MacroAssembler::JumpUnlessNonNegativeSmi(
1589 Register src, Label* on_not_smi_or_negative,
1590 Label::Distance near_jump) {
1591 Condition non_negative_smi = CheckNonNegativeSmi(src);
1592 j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
1593}
1594
1595
// Jumps to |on_equals| when the smi in |src| is equal to |constant|.
void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);
}
1603
1604
1605void MacroAssembler::JumpIfNotBothSmi(Register src1,
1606 Register src2,
1607 Label* on_not_both_smi,
1608 Label::Distance near_jump) {
1609 Condition both_smi = CheckBothSmi(src1, src2);
1610 j(NegateCondition(both_smi), on_not_both_smi, near_jump);
1611}
1612
1613
1614void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
1615 Register src2,
1616 Label* on_not_both_smi,
1617 Label::Distance near_jump) {
1618 Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
1619 j(NegateCondition(both_smi), on_not_both_smi, near_jump);
1620}
1621
1622
// Computes dst = src + constant, where all values are tagged smis.
// No overflow checking is performed; use only when the caller knows the
// result stays within the smi range.
void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    // Adding zero: at most a register move.
    if (!dst.is(src)) {
      movp(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    // In-place add: materialize the tagged constant in a register first.
    Register constant_reg = GetSmiConstant(constant);
    addp(dst, constant_reg);
  } else {
    // Load the tagged constant into dst, then add src.
    LoadSmiConstant(dst, constant);
    addp(dst, src);
  }
}
1638
1639
// Adds a smi constant to the tagged smi stored at memory operand |dst|.
// No overflow checking is performed.
void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    if (SmiValuesAre32Bits()) {
      // With 32-bit smi payloads the value lives in the upper half of the
      // word, so add the raw 32-bit integer to that half directly.
      addl(Operand(dst, kSmiShift / kBitsPerByte),
           Immediate(constant->value()));
    } else {
      DCHECK(SmiValuesAre31Bits());
      // 31-bit smis: the whole tagged constant fits in an immediate.
      addp(dst, Immediate(constant));
    }
  }
}
1651
1652
// Computes dst = src + constant with overflow handling selected by
// |constraints|:
//  - kBailoutOnNoOverflow: jump to |bailout_label| when the addition did
//    NOT overflow; on overflow the add is undone so dst regains the source
//    value and control falls through.
//  - kBailoutOnOverflow: jump to |bailout_label| on overflow, optionally
//    restoring dst first (kPreserveSourceRegister).
void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant,
                                    SmiOperationConstraints constraints,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    // Nothing to add; at most a register move.
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    LoadSmiConstant(kScratchRegister, constant);
    addp(dst, kScratchRegister);
    if (constraints & SmiOperationConstraint::kBailoutOnNoOverflow) {
      // Jump with the sum in dst when no overflow occurred; otherwise undo
      // the add (restoring the original value) and fall through.
      j(no_overflow, bailout_label, near_jump);
      DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
      subp(dst, kScratchRegister);
    } else if (constraints & SmiOperationConstraint::kBailoutOnOverflow) {
      if (constraints & SmiOperationConstraint::kPreserveSourceRegister) {
        // Restore dst (which aliases src) before taking the bailout path.
        Label done;
        j(no_overflow, &done, Label::kNear);
        subp(dst, kScratchRegister);
        jmp(bailout_label, near_jump);
        bind(&done);
      } else {
        // Bailout if overflow without reserving src.
        j(overflow, bailout_label, near_jump);
      }
    } else {
      UNREACHABLE();
    }
  } else {
    // dst != src: src is never clobbered, so only the plain overflow
    // bailout mode is supported here.
    DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
    DCHECK(constraints & SmiOperationConstraint::kBailoutOnOverflow);
    LoadSmiConstant(dst, constant);
    addp(dst, src);
    j(overflow, bailout_label, near_jump);
  }
}
1691
1692
// Computes dst = src - constant for tagged smis, without overflow checks.
void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    // In-place subtract via a materialized tagged constant.
    Register constant_reg = GetSmiConstant(constant);
    subp(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      // kMinValue has no representable negation, so it cannot be handled
      // by the negate-and-add path below.
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addp(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addp(dst, src);
    }
  }
}
1715
1716
// Computes dst = src - constant with overflow handling selected by
// |constraints|; mirrors the checked SmiAddConstant above.
//  - kBailoutOnNoOverflow: jump to |bailout_label| when the subtraction did
//    NOT overflow; on overflow it is undone and control falls through.
//  - kBailoutOnOverflow: jump to |bailout_label| on overflow, optionally
//    restoring dst first (kPreserveSourceRegister).
void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant,
                                    SmiOperationConstraints constraints,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    LoadSmiConstant(kScratchRegister, constant);
    subp(dst, kScratchRegister);
    if (constraints & SmiOperationConstraint::kBailoutOnNoOverflow) {
      // Jump with the difference in dst when no overflow occurred;
      // otherwise undo the subtraction and fall through.
      j(no_overflow, bailout_label, near_jump);
      DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
      addp(dst, kScratchRegister);
    } else if (constraints & SmiOperationConstraint::kBailoutOnOverflow) {
      if (constraints & SmiOperationConstraint::kPreserveSourceRegister) {
        // Restore dst (which aliases src) before taking the bailout path.
        Label done;
        j(no_overflow, &done, Label::kNear);
        addp(dst, kScratchRegister);
        jmp(bailout_label, near_jump);
        bind(&done);
      } else {
        // Bailout if overflow without reserving src.
        j(overflow, bailout_label, near_jump);
      }
    } else {
      UNREACHABLE();
    }
  } else {
    DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
    DCHECK(constraints & SmiOperationConstraint::kBailoutOnOverflow);
    if (constant->value() == Smi::kMinValue) {
      // kMinValue has no representable negation; subtract it directly via
      // the scratch register.
      DCHECK(!dst.is(kScratchRegister));
      movp(dst, src);
      LoadSmiConstant(kScratchRegister, constant);
      subp(dst, kScratchRegister);
      j(overflow, bailout_label, near_jump);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addp(dst, src);
      j(overflow, bailout_label, near_jump);
    }
  }
}
1764
1765
// Negates the smi in |src| into |dst|, jumping to |on_smi_result| when the
// negation produced a new valid smi. Negating 0 or Smi::kMinValue yields
// the input value again (0 stays 0; kMinValue wraps to itself), in which
// case control falls through with src preserved.
void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            Label* on_smi_result,
                            Label::Distance near_jump) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    // Keep a copy so src can be restored on the fall-through path.
    movp(kScratchRegister, src);
    negp(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpp(dst, kScratchRegister);
    j(not_equal, on_smi_result, near_jump);
    // Negation failed: restore the original value into src (== dst).
    movp(src, kScratchRegister);
  } else {
    movp(dst, src);
    negp(dst);
    cmpp(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result, near_jump);
  }
}
1786
1787
// Shared implementation for the checked SmiAdd overloads; T is Register or
// Operand. When dst aliases src1 the addition is performed in place and
// undone on overflow, so src1 is preserved for the bailout path.
template<class T>
static void SmiAddHelper(MacroAssembler* masm,
                         Register dst,
                         Register src1,
                         T src2,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
  if (dst.is(src1)) {
    Label done;
    masm->addp(dst, src2);
    masm->j(no_overflow, &done, Label::kNear);
    // Restore src1.
    masm->subp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->bind(&done);
  } else {
    masm->movp(dst, src1);
    masm->addp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);
  }
}
1809
1810
// Adds two smis; jumps to |on_not_smi_result| when the sum overflows the
// smi range. dst must not alias src2 so the helper can restore operands on
// the bailout path.
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!dst.is(src2));
  SmiAddHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1820
1821
// Adds a smi in memory (src2) to smi src1; jumps to |on_not_smi_result| on
// overflow. The memory operand must not address through dst, since the
// helper may write dst before the overflow decision.
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!src2.AddressUsesRegister(dst));
  SmiAddHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1831
1832
// Adds two smis when the caller guarantees the result cannot overflow.
// Debug builds verify that assumption.
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
      // Do the add in the scratch register purely to check the overflow
      // flag; leap below does not set flags.
      movp(kScratchRegister, src1);
      addp(kScratchRegister, src2);
      Check(no_overflow, kSmiAdditionOverflow);
    }
    // Compute the sum with lea, leaving src1 and src2 untouched.
    leap(dst, Operand(src1, src2, times_1, 0));
  } else {
    addp(dst, src2);
    Assert(no_overflow, kSmiAdditionOverflow);
  }
}
1850
1851
// Shared implementation for the checked SmiSub overloads; T is Register or
// Operand. When dst aliases src1 the subtraction is undone on overflow, so
// src1 is preserved for the bailout path.
template<class T>
static void SmiSubHelper(MacroAssembler* masm,
                         Register dst,
                         Register src1,
                         T src2,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
  if (dst.is(src1)) {
    Label done;
    masm->subp(dst, src2);
    masm->j(no_overflow, &done, Label::kNear);
    // Restore src1.
    masm->addp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->bind(&done);
  } else {
    masm->movp(dst, src1);
    masm->subp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);
  }
}
1873
1874
// Subtracts two smis; jumps to |on_not_smi_result| when the difference
// overflows the smi range. dst must not alias src2 so the helper can
// restore operands on the bailout path.
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!dst.is(src2));
  SmiSubHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1884
1885
// Subtracts a smi in memory (src2) from smi src1; jumps to
// |on_not_smi_result| on overflow. The memory operand must not address
// through dst.
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!src2.AddressUsesRegister(dst));
  SmiSubHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1895
1896
// Shared implementation for the unchecked SmiSub overloads; T is Register
// or Operand. Overflow is only asserted against in debug builds.
template<class T>
static void SmiSubNoOverflowHelper(MacroAssembler* masm,
                                   Register dst,
                                   Register src1,
                                   T src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (!dst.is(src1)) {
    masm->movp(dst, src1);
  }
  masm->subp(dst, src2);
  masm->Assert(no_overflow, kSmiSubtractionOverflow);
}
1910
1911
// Subtracts two smis without overflow checking (debug-asserted only).
void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  DCHECK(!dst.is(src2));
  SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);
}
1916
1917
// Subtracts a smi in memory from smi src1 without overflow checking
// (debug-asserted only).
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  SmiSubNoOverflowHelper<Operand>(this, dst, src1, src2);
}
1923
1924
// Multiplies two smis into dst; jumps to |on_not_smi_result| when the
// product overflows the smi range or would be negative zero (which has no
// smi representation). When dst aliases src1, src1 is restored before the
// bailout jump. Clobbers kScratchRegister.
void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!dst.is(src2));
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movp(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imulp(dst, src2);
    j(overflow, &failure, Label::kNear);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testp(dst, dst);
    j(not_zero, &correct_result, Label::kNear);

    // The sign of the product is the XOR of the operand signs.
    movp(dst, kScratchRegister);
    xorp(dst, src2);
    // Result was positive zero.
    j(positive, &zero_correct_result, Label::kNear);

    bind(&failure);  // Reused failure exit, restores src1.
    movp(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imulp(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testp(dst, dst);
    j(not_zero, &correct_result, Label::kNear);
    // One of src1 and src2 is zero, the check whether the other is
    // negative.
    movp(kScratchRegister, src1);
    xorp(kScratchRegister, src2);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);
  }
}
1978
1979
// Divides smi src1 by smi src2, leaving the smi quotient in dst. Bails out
// to |on_not_smi_result| whenever the quotient is not a smi: zero divisor,
// non-zero remainder, Smi::kMinValue divided by a negative value (guards
// the idiv overflow case), or a negative-zero result. Uses rax/rdx
// implicitly through idivl, hence the register constraints below. When
// src1 is rax it is saved in kScratchRegister and restored on bailout.
void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src2.is(rax));
  DCHECK(!src2.is(rdx));
  DCHECK(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  testp(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    // rax is clobbered below; keep a copy to restore on bailout.
    movp(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(~Smi::kMinValue));
  j(not_zero, &safe_div, Label::kNear);
  testp(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div, Label::kNear);
    // Restore src1 (== rax) before bailing out.
    movp(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
  } else {
    j(negative, on_not_smi_result, near_jump);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  // Re-tag the divisor, which was untagged for idivl.
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result, Label::kNear);
    movp(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result, near_jump);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    // dst != src1: restore rax before it is overwritten with the result.
    movp(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}
2041
2042
// Computes the smi remainder of src1 / src2 into dst. Bails out to
// |on_not_smi_result| on: zero divisor, Smi::kMinValue / -1 (would
// overflow idivl), or a zero remainder with a negative dividend (negative
// zero is not representable as a smi). Uses rax/rdx implicitly through
// idivl; when src1 is rax it is saved in kScratchRegister and restored on
// every exit path.
void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!src2.is(rax));
  DCHECK(!src2.is(rdx));
  DCHECK(!src1.is(rdx));
  DCHECK(!src1.is(src2));

  // Check for a zero divisor.
  testp(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    // rax is clobbered below; keep a copy to restore later.
    movp(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div, Label::kNear);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div, Label::kNear);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movp(src1, kScratchRegister);
  }
  jmp(on_not_smi_result, near_jump);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movp(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result, Label::kNear);
  testp(src1, src1);
  j(negative, on_not_smi_result, near_jump);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}
2097
2098
// Computes the bitwise NOT of the smi in |src|, producing a valid smi in
// |dst|. The tag/padding bits are set to ones first so that the final notp
// leaves them zero.
void MacroAssembler::SmiNot(Register dst, Register src) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src.is(kScratchRegister));
  if (SmiValuesAre32Bits()) {
    // Set tag and padding bits before negating, so that they are zero
    // afterwards.
    movl(kScratchRegister, Immediate(~0));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movl(kScratchRegister, Immediate(1));
  }
  if (dst.is(src)) {
    xorp(dst, kScratchRegister);
  } else {
    // leap adds the mask; since a smi's tag bits are zero this is
    // equivalent to the xor above but does not clobber src.
    leap(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  notp(dst);
}
2117
2118
2119void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002120 DCHECK(!dst.is(src2));
Steve Blocka7e24c12009-10-30 11:49:00 +00002121 if (!dst.is(src1)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002122 movp(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002123 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002124 andp(dst, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002125}
2126
2127
// Computes dst = src & constant for tagged smis.
void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    // x & 0 is always 0.
    Set(dst, 0);
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    // In-place and via a materialized tagged constant.
    Register constant_reg = GetSmiConstant(constant);
    andp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    andp(dst, src);
  }
}
2140
2141
2142void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
2143 if (!dst.is(src1)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002144 DCHECK(!src1.is(src2));
2145 movp(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002146 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002147 orp(dst, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002148}
2149
2150
// Computes dst = src | constant for tagged smis.
void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    // In-place or via a materialized tagged constant.
    Register constant_reg = GetSmiConstant(constant);
    orp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    orp(dst, src);
  }
}
2161
Steve Block3ce2e202009-11-05 08:53:23 +00002162
Steve Blocka7e24c12009-10-30 11:49:00 +00002163void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
2164 if (!dst.is(src1)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002165 DCHECK(!src1.is(src2));
2166 movp(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002167 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002168 xorp(dst, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002169}
2170
2171
// Computes dst = src ^ constant for tagged smis.
void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    // In-place xor via a materialized tagged constant.
    Register constant_reg = GetSmiConstant(constant);
    xorp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xorp(dst, src);
  }
}
2182
2183
// Arithmetic right shift of a smi by a constant amount, leaving a tagged
// smi in dst. Only the in-place (dst == src) form is implemented.
void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  DCHECK(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      // Shift the payload down past the tag bits, then shift back up to
      // restore the tag.
      sarp(dst, Immediate(shift_value + kSmiShift));
      shlp(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}
2197
2198
// Shifts a smi left by a constant amount. With 32-bit smi payloads the
// shift is applied to the whole tagged word and no bailout is emitted (the
// 32-bit payload wraps, so the result is still a valid smi); with 31-bit
// smis the result is range-checked and |on_not_smi_result| taken when it
// does not fit.
void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  if (SmiValuesAre32Bits()) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
    if (shift_value > 0) {
      // Shift amount specified by lower 5 bits, not six as the shl opcode.
      shlq(dst, Immediate(shift_value & 0x1f));
    }
  } else {
    DCHECK(SmiValuesAre31Bits());
    if (dst.is(src)) {
      UNIMPLEMENTED();  // Not used.
    } else {
      // Untag, shift, verify the result is still a smi, retag.
      SmiToInteger32(dst, src);
      shll(dst, Immediate(shift_value));
      JumpIfNotValidSmiValue(dst, on_not_smi_result, near_jump);
      Integer32ToSmi(dst, dst);
    }
  }
}
2224
2225
// Logical (unsigned) right shift of a smi by a constant amount. Jumps to
// |on_not_smi_result| when the unsigned result cannot be represented as a
// smi (e.g. a negative input shifted by zero).
void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  // Logic right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    if (shift_value == 0) {
      // A zero shift leaves a negative value negative, but the unsigned
      // interpretation of it exceeds the smi range; bail out.
      testp(src, src);
      j(negative, on_not_smi_result, near_jump);
    }
    if (SmiValuesAre32Bits()) {
      // Shift the payload down past the tag bits, then restore the tag.
      movp(dst, src);
      shrp(dst, Immediate(shift_value + kSmiShift));
      shlp(dst, Immediate(kSmiShift));
    } else {
      DCHECK(SmiValuesAre31Bits());
      // Untag, shift, verify the unsigned result fits in a smi, retag.
      SmiToInteger32(dst, src);
      shrp(dst, Immediate(shift_value));
      JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
      Integer32ToSmi(dst, dst);
    }
  }
}
2250
2251
// Shifts smi src1 left by the smi amount in src2 into dst. With 32-bit smi
// payloads the (5-bit masked) shift always yields a valid smi, so no
// bailout is emitted; with 31-bit smis the result is range-checked and
// |on_not_smi_result| taken on overflow, restoring an rcx-aliased source
// first. Clobbers rcx, the implicit shift-count register.
void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smi_result,
                                  Label::Distance near_jump) {
  if (SmiValuesAre32Bits()) {
    DCHECK(!dst.is(rcx));
    if (!dst.is(src1)) {
      movp(dst, src1);
    }
    // Untag shift amount.
    SmiToInteger32(rcx, src2);
    // Shift amount specified by lower 5 bits, not six as the shl opcode.
    andp(rcx, Immediate(0x1f));
    shlq_cl(dst);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(!dst.is(kScratchRegister));
    DCHECK(!src1.is(kScratchRegister));
    DCHECK(!src2.is(kScratchRegister));
    DCHECK(!dst.is(src2));
    DCHECK(!dst.is(rcx));

    if (src1.is(rcx) || src2.is(rcx)) {
      // Save rcx so an rcx-aliased source can be restored on bailout.
      movq(kScratchRegister, rcx);
    }
    if (dst.is(src1)) {
      UNIMPLEMENTED();  // Not used.
    } else {
      Label valid_result;
      SmiToInteger32(dst, src1);
      SmiToInteger32(rcx, src2);
      shll_cl(dst);
      JumpIfValidSmiValue(dst, &valid_result, Label::kNear);
      // As src1 or src2 could not be dst, we do not need to restore them for
      // clobbering dst.
      if (src1.is(rcx) || src2.is(rcx)) {
        if (src1.is(rcx)) {
          movq(src1, kScratchRegister);
        } else {
          movq(src2, kScratchRegister);
        }
      }
      jmp(on_not_smi_result, near_jump);
      bind(&valid_result);
      Integer32ToSmi(dst, dst);
    }
  }
}
2301
2302
// Logical (unsigned) right shift of smi src1 by the smi amount in src2.
// The unsigned result may exceed the smi range (e.g. a negative value
// shifted by zero), in which case |on_not_smi_result| is taken with any
// rcx-aliased source restored. Clobbers rcx, the implicit shift-count
// register.
void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(src2));
  DCHECK(!dst.is(rcx));
  if (src1.is(rcx) || src2.is(rcx)) {
    // Save rcx so an rcx-aliased source can be restored on bailout.
    movq(kScratchRegister, rcx);
  }
  if (dst.is(src1)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    Label valid_result;
    SmiToInteger32(dst, src1);
    SmiToInteger32(rcx, src2);
    shrl_cl(dst);
    JumpIfUIntValidSmiValue(dst, &valid_result, Label::kNear);
    // As src1 or src2 could not be dst, we do not need to restore them for
    // clobbering dst.
    if (src1.is(rcx) || src2.is(rcx)) {
      if (src1.is(rcx)) {
        movq(src1, kScratchRegister);
      } else {
        movq(src2, kScratchRegister);
      }
    }
    jmp(on_not_smi_result, near_jump);
    bind(&valid_result);
    Integer32ToSmi(dst, dst);
  }
}
2338
2339
// Arithmetic right shift of smi src1 by the smi amount in src2. The result
// of an arithmetic right shift of a smi payload is always a valid smi, so
// no bailout label is needed. Clobbers rcx, the implicit shift-count
// register.
void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(rcx));

  // Untag the shift count into rcx for the _cl shift below.
  SmiToInteger32(rcx, src2);
  if (!dst.is(src1)) {
    movp(dst, src1);
  }
  // Untag, shift, retag.
  SmiToInteger32(dst, dst);
  sarl_cl(dst);
  Integer32ToSmi(dst, dst);
}
2356
2357
// Selects the non-smi operand of (src1, src2) into dst, assuming exactly one
// of them is a smi. If neither is a smi, jumps to on_not_smis. The case of
// both being smis is excluded (checked in debug mode below). Branch-free
// select via bit masking; clobbers kScratchRegister.
void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(src1));
  DCHECK(!dst.is(src2));
  // The operands must not both be smis.
#ifdef DEBUG
  Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
  Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
#endif
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  andp(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // Non-zero means the tag bit is set in both operands; since kSmiTag == 0,
  // a set tag bit means "not a smi", so neither operand is a smi.
  j(not_zero, on_not_smis, near_jump);

  // Exactly one operand is a smi.
  DCHECK_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subp(kScratchRegister, Immediate(1));
  // If src1 is a smi, then scratch register all 1s, else it is all 0s.
  movp(dst, src1);
  xorp(dst, src2);
  andp(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xorp(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
2393
2394
// Converts the smi in src into an index usable in a memory operand, scaled
// by 2^shift, and returns the (register, scale) pair. With 32-bit smi
// values the scaling is folded into the untagging shift; with 31-bit smi
// values the hardware scale factor is used where possible.
SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  if (SmiValuesAre32Bits()) {
    DCHECK(is_uint6(shift));
    // There is a possible optimization if shift is in the range 60-63, but that
    // will (and must) never happen.
    if (!dst.is(src)) {
      movp(dst, src);
    }
    // Combine untagging (>> kSmiShift) and scaling (<< shift) into a single
    // shift in the appropriate direction.
    if (shift < kSmiShift) {
      sarp(dst, Immediate(kSmiShift - shift));
    } else {
      shlp(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    // We have to sign extend the index register to 64-bit as the SMI might
    // be negative.
    movsxlq(dst, dst);
    if (shift == times_1) {
      sarq(dst, Immediate(kSmiShift));
      return SmiIndex(dst, times_1);
    }
    // The remaining tag shift is absorbed into the scale factor.
    return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
  }
}
2427
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002428
// Like SmiToIndex, but negates the (positive) smi in src first, producing a
// negative scaled index. See SmiToIndex for the shift-folding scheme.
SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  if (SmiValuesAre32Bits()) {
    // Register src holds a positive smi.
    DCHECK(is_uint6(shift));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    negp(dst);
    if (shift < kSmiShift) {
      sarp(dst, Immediate(kSmiShift - shift));
    } else {
      shlp(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    negq(dst);
    if (shift == times_1) {
      sarq(dst, Immediate(kSmiShift));
      return SmiIndex(dst, times_1);
    }
    return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
  }
}
2459
2460
// Adds the integer value of the smi stored at memory operand src to the
// 32-bit value in dst. With 32-bit smi values the payload lives in the high
// half of the word, so it can be read directly with a byte-offset load;
// otherwise the smi is untagged through kScratchRegister first.
void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    DCHECK_EQ(0, kSmiShift % kBitsPerByte);
    addl(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    SmiToInteger32(kScratchRegister, src);
    addl(dst, kScratchRegister);
  }
}
2471
2472
// Pushes a smi constant. Uses an immediate push when the tagged bit pattern
// fits in 32 bits; otherwise materializes the constant in a register first.
void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    Push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    Push(constant);
  }
}
2482
2483
// Pushes the raw word in src as two smis (high part first, then low part),
// so an arbitrary bit pattern can live on the stack in GC-safe, tagged form.
// Clobbers src and scratch. Reversed by PopRegisterAsTwoSmis.
void MacroAssembler::PushRegisterAsTwoSmis(Register src, Register scratch) {
  DCHECK(!src.is(scratch));
  movp(scratch, src);
  // High bits.
  shrp(src, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
  shlp(src, Immediate(kSmiShift));
  Push(src);
  // Low bits.
  shlp(scratch, Immediate(kSmiShift));
  Push(scratch);
}
2495
2496
// Reassembles into dst a word previously pushed by PushRegisterAsTwoSmis
// (pops the low-part smi first, then the high-part smi). Clobbers scratch.
void MacroAssembler::PopRegisterAsTwoSmis(Register dst, Register scratch) {
  DCHECK(!dst.is(scratch));
  Pop(scratch);
  // Low bits.
  shrp(scratch, Immediate(kSmiShift));
  Pop(dst);
  shrp(dst, Immediate(kSmiShift));
  // High bits.
  shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
  orp(dst, scratch);
}
2508
2509
// Bitwise-tests the smi in memory at src against the smi constant source
// (sets flags only). With 32-bit smi values the untagged payload sits in the
// upper half of the word, so the raw value is tested at offset kIntSize.
void MacroAssembler::Test(const Operand& src, Smi* source) {
  if (SmiValuesAre32Bits()) {
    testl(Operand(src, kIntSize), Immediate(source->value()));
  } else {
    DCHECK(SmiValuesAre31Bits());
    testl(src, Immediate(source));
  }
}
2518
2519
2520// ----------------------------------------------------------------------------
2521
2522
// Jumps to not_string if object is not a string (i.e. is a smi, or its
// instance type is >= FIRST_NONSTRING_TYPE). Loads object's map into
// object_map as a side effect.
void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     Label* not_string,
                                     Label::Distance near_jump) {
  Condition is_smi = CheckSmi(object);
  j(is_smi, not_string, near_jump);
  CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
  j(above_equal, not_string, near_jump);
}
2532
2533
// Jumps to on_fail unless both objects are flat (sequential) one-byte
// strings. Checks both instance types in a single compare by packing the
// two masked type bytes into one register. Clobbers scratch1 and scratch2.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(
    Register first_object, Register second_object, Register scratch1,
    Register scratch2, Label* on_fail, Label::Distance near_jump) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);

  // Load instance type for both strings.
  movp(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movp(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  DCHECK(kNotStringTag != 0);
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;

  andl(scratch1, Immediate(kFlatOneByteStringMask));
  andl(scratch2, Immediate(kFlatOneByteStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
2563
2564
// Jumps to failure unless the given instance type denotes a flat
// (sequential) one-byte string. Clobbers scratch (which may alias
// instance_type).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatOneByteStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kOneByteStringTag));
  j(not_equal, failure, near_jump);
}
2579
2580
// Jumps to on_fail unless both (already-loaded) instance types denote flat
// sequential one-byte strings. Same packed single-compare trick as
// JumpIfNotBothSequentialOneByteStrings. Clobbers scratch1 and scratch2.
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first_object_instance_type, Register second_object_instance_type,
    Register scratch1, Register scratch2, Label* on_fail,
    Label::Distance near_jump) {
  // Load instance type for both strings.
  movp(scratch1, first_object_instance_type);
  movp(scratch2, second_object_instance_type);

  // Check that both are flat one-byte strings.
  DCHECK(kNotStringTag != 0);
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;

  andl(scratch1, Immediate(kFlatOneByteStringMask));
  andl(scratch2, Immediate(kFlatOneByteStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
2605
2606
// Shared implementation for both JumpIfNotUniqueNameInstanceType overloads.
// operand_or_register holds an instance type (as Operand or Register);
// jumps to not_unique_name unless it is an internalized string or a symbol.
template<class T>
static void JumpIfNotUniqueNameHelper(MacroAssembler* masm,
                                      T operand_or_register,
                                      Label* not_unique_name,
                                      Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  // Internalized strings have both the string and internalized bits clear.
  masm->testb(operand_or_register,
              Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  masm->j(zero, &succeed, Label::kNear);
  // Otherwise, only a symbol qualifies as a unique name.
  masm->cmpb(operand_or_register, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
  masm->j(not_equal, not_unique_name, distance);

  masm->bind(&succeed);
}
2622
2623
// Operand overload: see JumpIfNotUniqueNameHelper for the check performed.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Operand>(this, operand, not_unique_name, distance);
}
2629
2630
// Register overload: see JumpIfNotUniqueNameHelper for the check performed.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Register>(this, reg, not_unique_name, distance);
}
2636
Steve Block44f0eee2011-05-26 01:26:41 +01002637
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002638void MacroAssembler::Move(Register dst, Register src) {
2639 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002640 movp(dst, src);
Steve Block6ded16b2010-05-10 14:33:55 +01002641 }
Steve Block6ded16b2010-05-10 14:33:55 +01002642}
2643
2644
// Loads the object referenced by the handle into dst: an immediate for
// smis, otherwise via MoveHeapObject (which handles new-space relocation).
void MacroAssembler::Move(Register dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(dst, source);
  }
}
2653
2654
// Stores the object referenced by the handle to memory at dst. Heap objects
// are staged through kScratchRegister.
void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    movp(dst, kScratchRegister);
  }
}
2664
2665
// Materializes a 32-bit constant into an XMM register, using cheap
// special-case encodings for all-zeros (xor) and all-ones (pcmpeq) before
// falling back to a GP-register round trip.
void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
  if (src == 0) {
    Xorpd(dst, dst);
  } else {
    unsigned pop = base::bits::CountPopulation32(src);
    DCHECK_NE(0u, pop);
    if (pop == 32) {
      // All bits set: pcmpeqd against itself yields all ones.
      Pcmpeqd(dst, dst);
    } else {
      movl(kScratchRegister, Immediate(src));
      Movq(dst, kScratchRegister);
    }
  }
}
2680
2681
// Materializes a 64-bit constant into an XMM register. Prefers compact
// encodings: xor for zero; pcmpeq (plus a shift) for all-ones patterns that
// are a run of ones ending at either end of the word; the 32-bit path when
// the upper half is zero; otherwise a full 64-bit GP-register round trip.
void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
  if (src == 0) {
    Xorpd(dst, dst);
  } else {
    unsigned nlz = base::bits::CountLeadingZeros64(src);
    unsigned ntz = base::bits::CountTrailingZeros64(src);
    unsigned pop = base::bits::CountPopulation64(src);
    DCHECK_NE(0u, pop);
    if (pop == 64) {
      Pcmpeqd(dst, dst);
    } else if (pop + ntz == 64) {
      // Ones run from bit ntz to bit 63: all-ones shifted left.
      Pcmpeqd(dst, dst);
      Psllq(dst, ntz);
    } else if (pop + nlz == 64) {
      // Ones run from bit 0 to bit 63-nlz: all-ones shifted right.
      Pcmpeqd(dst, dst);
      Psrlq(dst, nlz);
    } else {
      uint32_t lower = static_cast<uint32_t>(src);
      uint32_t upper = static_cast<uint32_t>(src >> 32);
      if (upper == 0) {
        Move(dst, lower);
      } else {
        movq(kScratchRegister, src);
        Movq(dst, kScratchRegister);
      }
    }
  }
}
2710
2711
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002712void MacroAssembler::Movaps(XMMRegister dst, XMMRegister src) {
2713 if (CpuFeatures::IsSupported(AVX)) {
2714 CpuFeatureScope scope(this, AVX);
2715 vmovaps(dst, src);
2716 } else {
2717 movaps(dst, src);
2718 }
2719}
2720
2721
2722void MacroAssembler::Movapd(XMMRegister dst, XMMRegister src) {
2723 if (CpuFeatures::IsSupported(AVX)) {
2724 CpuFeatureScope scope(this, AVX);
2725 vmovapd(dst, src);
2726 } else {
2727 movapd(dst, src);
2728 }
2729}
2730
2731
2732void MacroAssembler::Movsd(XMMRegister dst, XMMRegister src) {
2733 if (CpuFeatures::IsSupported(AVX)) {
2734 CpuFeatureScope scope(this, AVX);
2735 vmovsd(dst, dst, src);
2736 } else {
2737 movsd(dst, src);
2738 }
2739}
2740
2741
2742void MacroAssembler::Movsd(XMMRegister dst, const Operand& src) {
2743 if (CpuFeatures::IsSupported(AVX)) {
2744 CpuFeatureScope scope(this, AVX);
2745 vmovsd(dst, src);
2746 } else {
2747 movsd(dst, src);
2748 }
2749}
2750
2751
2752void MacroAssembler::Movsd(const Operand& dst, XMMRegister src) {
2753 if (CpuFeatures::IsSupported(AVX)) {
2754 CpuFeatureScope scope(this, AVX);
2755 vmovsd(dst, src);
2756 } else {
2757 movsd(dst, src);
2758 }
2759}
2760
2761
2762void MacroAssembler::Movss(XMMRegister dst, XMMRegister src) {
2763 if (CpuFeatures::IsSupported(AVX)) {
2764 CpuFeatureScope scope(this, AVX);
2765 vmovss(dst, dst, src);
2766 } else {
2767 movss(dst, src);
2768 }
2769}
2770
2771
2772void MacroAssembler::Movss(XMMRegister dst, const Operand& src) {
2773 if (CpuFeatures::IsSupported(AVX)) {
2774 CpuFeatureScope scope(this, AVX);
2775 vmovss(dst, src);
2776 } else {
2777 movss(dst, src);
2778 }
2779}
2780
2781
2782void MacroAssembler::Movss(const Operand& dst, XMMRegister src) {
2783 if (CpuFeatures::IsSupported(AVX)) {
2784 CpuFeatureScope scope(this, AVX);
2785 vmovss(dst, src);
2786 } else {
2787 movss(dst, src);
2788 }
2789}
2790
2791
2792void MacroAssembler::Movd(XMMRegister dst, Register src) {
2793 if (CpuFeatures::IsSupported(AVX)) {
2794 CpuFeatureScope scope(this, AVX);
2795 vmovd(dst, src);
2796 } else {
2797 movd(dst, src);
2798 }
2799}
2800
2801
2802void MacroAssembler::Movd(XMMRegister dst, const Operand& src) {
2803 if (CpuFeatures::IsSupported(AVX)) {
2804 CpuFeatureScope scope(this, AVX);
2805 vmovd(dst, src);
2806 } else {
2807 movd(dst, src);
2808 }
2809}
2810
2811
2812void MacroAssembler::Movd(Register dst, XMMRegister src) {
2813 if (CpuFeatures::IsSupported(AVX)) {
2814 CpuFeatureScope scope(this, AVX);
2815 vmovd(dst, src);
2816 } else {
2817 movd(dst, src);
2818 }
2819}
2820
2821
2822void MacroAssembler::Movq(XMMRegister dst, Register src) {
2823 if (CpuFeatures::IsSupported(AVX)) {
2824 CpuFeatureScope scope(this, AVX);
2825 vmovq(dst, src);
2826 } else {
2827 movq(dst, src);
2828 }
2829}
2830
2831
2832void MacroAssembler::Movq(Register dst, XMMRegister src) {
2833 if (CpuFeatures::IsSupported(AVX)) {
2834 CpuFeatureScope scope(this, AVX);
2835 vmovq(dst, src);
2836 } else {
2837 movq(dst, src);
2838 }
2839}
2840
2841
2842void MacroAssembler::Movmskpd(Register dst, XMMRegister src) {
2843 if (CpuFeatures::IsSupported(AVX)) {
2844 CpuFeatureScope scope(this, AVX);
2845 vmovmskpd(dst, src);
2846 } else {
2847 movmskpd(dst, src);
2848 }
2849}
2850
2851
2852void MacroAssembler::Roundss(XMMRegister dst, XMMRegister src,
2853 RoundingMode mode) {
2854 if (CpuFeatures::IsSupported(AVX)) {
2855 CpuFeatureScope scope(this, AVX);
2856 vroundss(dst, dst, src, mode);
2857 } else {
2858 roundss(dst, src, mode);
2859 }
2860}
2861
2862
2863void MacroAssembler::Roundsd(XMMRegister dst, XMMRegister src,
2864 RoundingMode mode) {
2865 if (CpuFeatures::IsSupported(AVX)) {
2866 CpuFeatureScope scope(this, AVX);
2867 vroundsd(dst, dst, src, mode);
2868 } else {
2869 roundsd(dst, src, mode);
2870 }
2871}
2872
2873
2874void MacroAssembler::Sqrtsd(XMMRegister dst, XMMRegister src) {
2875 if (CpuFeatures::IsSupported(AVX)) {
2876 CpuFeatureScope scope(this, AVX);
2877 vsqrtsd(dst, dst, src);
2878 } else {
2879 sqrtsd(dst, src);
2880 }
2881}
2882
2883
2884void MacroAssembler::Sqrtsd(XMMRegister dst, const Operand& src) {
2885 if (CpuFeatures::IsSupported(AVX)) {
2886 CpuFeatureScope scope(this, AVX);
2887 vsqrtsd(dst, dst, src);
2888 } else {
2889 sqrtsd(dst, src);
2890 }
2891}
2892
2893
2894void MacroAssembler::Ucomiss(XMMRegister src1, XMMRegister src2) {
2895 if (CpuFeatures::IsSupported(AVX)) {
2896 CpuFeatureScope scope(this, AVX);
2897 vucomiss(src1, src2);
2898 } else {
2899 ucomiss(src1, src2);
2900 }
2901}
2902
2903
2904void MacroAssembler::Ucomiss(XMMRegister src1, const Operand& src2) {
2905 if (CpuFeatures::IsSupported(AVX)) {
2906 CpuFeatureScope scope(this, AVX);
2907 vucomiss(src1, src2);
2908 } else {
2909 ucomiss(src1, src2);
2910 }
2911}
2912
2913
2914void MacroAssembler::Ucomisd(XMMRegister src1, XMMRegister src2) {
2915 if (CpuFeatures::IsSupported(AVX)) {
2916 CpuFeatureScope scope(this, AVX);
2917 vucomisd(src1, src2);
2918 } else {
2919 ucomisd(src1, src2);
2920 }
2921}
2922
2923
2924void MacroAssembler::Ucomisd(XMMRegister src1, const Operand& src2) {
2925 if (CpuFeatures::IsSupported(AVX)) {
2926 CpuFeatureScope scope(this, AVX);
2927 vucomisd(src1, src2);
2928 } else {
2929 ucomisd(src1, src2);
2930 }
2931}
2932
2933
// Compares dst against the object referenced by the handle (sets flags
// only). Heap objects are staged through kScratchRegister.
void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    cmpp(dst, kScratchRegister);
  }
}
2943
2944
// Compares the memory operand dst against the object referenced by the
// handle (sets flags only). Heap objects are staged through
// kScratchRegister.
void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    cmpp(dst, kScratchRegister);
  }
}
2954
2955
// Pushes the object referenced by the handle. Heap objects are staged
// through kScratchRegister.
void MacroAssembler::Push(Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    Push(kScratchRegister);
  }
}
2965
2966
// Loads a heap object into result. New-space objects can move during GC, so
// they are referenced indirectly through a cell (the cell address is what
// gets embedded/relocated); old-space objects are embedded directly.
void MacroAssembler::MoveHeapObject(Register result,
                                    Handle<Object> object) {
  AllowDeferredHandleDereference using_raw_address;
  DCHECK(object->IsHeapObject());
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    Move(result, cell, RelocInfo::CELL);
    movp(result, Operand(result, 0));
  } else {
    Move(result, object, RelocInfo::EMBEDDED_OBJECT);
  }
}
2979
2980
// Loads the value stored in a global cell into dst. Uses the shorter
// load_rax encoding when dst is rax.
void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) {
  if (dst.is(rax)) {
    AllowDeferredHandleDereference embedding_raw_address;
    load_rax(cell.location(), RelocInfo::CELL);
  } else {
    Move(dst, cell, RelocInfo::CELL);
    movp(dst, Operand(dst, 0));
  }
}
2990
2991
// Compares value against the object held by the weak cell (sets flags
// only). Clobbers scratch with the cell address.
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  Move(scratch, cell, RelocInfo::EMBEDDED_OBJECT);
  cmpp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
2997
2998
// Loads the object held by the weak cell into value. If the cell has been
// cleared the loaded value is the cleared sentinel (see LoadWeakValue for a
// checked variant).
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  Move(value, cell, RelocInfo::EMBEDDED_OBJECT);
  movp(value, FieldOperand(value, WeakCell::kValueOffset));
}
3003
3004
// Loads the object held by the weak cell into value, jumping to miss if the
// cell has been cleared (a cleared cell's value is a smi).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
3010
3011
Leon Clarkee46be812010-01-19 14:06:41 +00003012void MacroAssembler::Drop(int stack_elements) {
3013 if (stack_elements > 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003014 addp(rsp, Immediate(stack_elements * kPointerSize));
Leon Clarkee46be812010-01-19 14:06:41 +00003015 }
3016}
3017
3018
// Removes stack_elements slots located below the return address, keeping
// the return address on top. The single-slot 64-bit case uses one
// pop-to-memory instruction; otherwise the return address is moved aside
// via scratch.
void MacroAssembler::DropUnderReturnAddress(int stack_elements,
                                            Register scratch) {
  DCHECK(stack_elements > 0);
  if (kPointerSize == kInt64Size && stack_elements == 1) {
    // popq reads the return address and writes it one slot up, dropping the
    // intervening element in a single instruction.
    popq(MemOperand(rsp, 0));
    return;
  }

  PopReturnAddressTo(scratch);
  Drop(stack_elements);
  PushReturnAddressFrom(scratch);
}
3031
3032
// Pointer-sized push. On x32 (4-byte pointers) the 64-bit push instruction
// cannot be used, so the stack pointer is adjusted and the value stored
// explicitly.
void MacroAssembler::Push(Register src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    // x32 uses 64-bit push for rbp in the prologue.
    DCHECK(src.code() != rbp.code());
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), src);
  }
}
3043
3044
// Pointer-sized push of a memory operand. The x32 path stages the value in
// kScratchRegister because the source address may be rsp-relative and would
// be invalidated by adjusting rsp first.
void MacroAssembler::Push(const Operand& src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    movp(kScratchRegister, src);
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), kScratchRegister);
  }
}
3054
3055
// Pushes a full 64-bit quantity regardless of pointer size (x32 stages it
// through kScratchRegister).
void MacroAssembler::PushQuad(const Operand& src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    movp(kScratchRegister, src);
    pushq(kScratchRegister);
  }
}
3064
3065
// Pointer-sized push of an immediate (x32 adjusts rsp and stores
// explicitly).
void MacroAssembler::Push(Immediate value) {
  if (kPointerSize == kInt64Size) {
    pushq(value);
  } else {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), value);
  }
}
3074
3075
// Pushes a 32-bit immediate using the fixed-width imm32 push encoding on
// x64 (useful when the instruction size must be predictable); x32 adjusts
// rsp and stores explicitly.
void MacroAssembler::PushImm32(int32_t imm32) {
  if (kPointerSize == kInt64Size) {
    pushq_imm32(imm32);
  } else {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), Immediate(imm32));
  }
}
3084
3085
// Pointer-sized pop. On x32 the 64-bit pop cannot be used, so the value is
// loaded and the stack pointer adjusted explicitly.
void MacroAssembler::Pop(Register dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    // x32 uses 64-bit pop for rbp in the epilogue.
    DCHECK(dst.code() != rbp.code());
    movp(dst, Operand(rsp, 0));
    leal(rsp, Operand(rsp, 4));
  }
}
3096
3097
// Pointer-sized pop into a memory operand. On x32 the value is staged
// through a scratch register; if the destination address itself uses
// kScratchRegister, kRootRegister is borrowed instead and re-initialized
// afterwards.
void MacroAssembler::Pop(const Operand& dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    Register scratch = dst.AddressUsesRegister(kScratchRegister)
        ? kRootRegister : kScratchRegister;
    movp(scratch, Operand(rsp, 0));
    movp(dst, scratch);
    leal(rsp, Operand(rsp, 4));
    if (scratch.is(kRootRegister)) {
      // Restore kRootRegister.
      InitializeRootRegister();
    }
  }
}
3113
3114
// Pops a full 64-bit quantity regardless of pointer size (x32 stages it
// through kScratchRegister).
void MacroAssembler::PopQuad(const Operand& dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    popq(kScratchRegister);
    movp(dst, kScratchRegister);
  }
}
3123
3124
// Loads one of the 32-bit "special" fields of a SharedFunctionInfo into dst
// as a sign-extended integer. The DCHECK restricts the offset to the range
// of such fields; on x32 those fields are stored as smis and must be
// untagged.
void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst,
                                                        Register base,
                                                        int offset) {
  DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
         offset <= SharedFunctionInfo::kSize &&
         (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
  if (kPointerSize == kInt64Size) {
    movsxlq(dst, FieldOperand(base, offset));
  } else {
    movp(dst, FieldOperand(base, offset));
    SmiToInteger32(dst, dst);
  }
}
3138
3139
// Tests a single bit of a SharedFunctionInfo "special" field (sets flags
// only). On x32 the field is a smi, so the bit index is adjusted by the tag
// shift; the bit is then tested with a single byte-sized access.
void MacroAssembler::TestBitSharedFunctionInfoSpecialField(Register base,
                                                           int offset,
                                                           int bits) {
  DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
         offset <= SharedFunctionInfo::kSize &&
         (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
  if (kPointerSize == kInt32Size) {
    // On x32, this field is represented by SMI.
    bits += kSmiShift;
  }
  int byte_offset = bits / kBitsPerByte;
  int bit_in_byte = bits & (kBitsPerByte - 1);
  testb(FieldOperand(base, offset + byte_offset), Immediate(1 << bit_in_byte));
}
3154
3155
// Jumps to the address of the external reference via kScratchRegister.
void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}
3160
3161
// Jumps to the address stored in the memory operand (x32 stages it through
// kScratchRegister, as the 64-bit indirect jump form is not used there).
void MacroAssembler::Jump(const Operand& op) {
  if (kPointerSize == kInt64Size) {
    jmp(op);
  } else {
    movp(kScratchRegister, op);
    jmp(kScratchRegister);
  }
}
3170
3171
// Jumps to an absolute address with the given relocation mode, via
// kScratchRegister.
void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  Move(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}
3176
3177
// Jumps to a code object with the given relocation mode.
void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}
3182
3183
// Returns the exact byte size of the code emitted by
// Call(ExternalReference): the address load plus the indirect call through
// kScratchRegister.
int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
  return LoadAddressSize(ext) +
         Assembler::kCallScratchRegisterInstructionLength;
}
3189
3190
// Calls the address of the external reference via kScratchRegister. In
// debug builds, verifies that the emitted size matches CallSize() (the
// deoptimizer and patching code rely on exact call sizes).
void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}
3201
3202
// Calls through a memory operand. On x32, or when targeting Atom (where the
// register-indirect form is preferred), the target is staged in
// kScratchRegister first.
void MacroAssembler::Call(const Operand& op) {
  if (kPointerSize == kInt64Size && !CpuFeatures::IsSupported(ATOM)) {
    call(op);
  } else {
    movp(kScratchRegister, op);
    call(kScratchRegister);
  }
}
3211
3212
Steve Blocka7e24c12009-10-30 11:49:00 +00003213void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
Steve Block44f0eee2011-05-26 01:26:41 +01003214#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003215 int end_position = pc_offset() + CallSize(destination);
Steve Block44f0eee2011-05-26 01:26:41 +01003216#endif
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003217 Move(kScratchRegister, destination, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00003218 call(kScratchRegister);
Steve Block44f0eee2011-05-26 01:26:41 +01003219#ifdef DEBUG
3220 CHECK_EQ(pc_offset(), end_position);
3221#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003222}
3223
3224
// Calls a code object target, forwarding the AST id for type feedback.
// Only code-target (or code-age sequence) relocation modes are legal here.
void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  DCHECK(RelocInfo::IsCodeTarget(rmode) ||
      rmode == RelocInfo::CODE_AGE_SEQUENCE);
  call(code_object, rmode, ast_id);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}
3238
3239
// Extracts 32-bit lane |imm8| (0 or 1 only) of |src| into |dst|.
// Uses pextrd when SSE4.1 is available; otherwise falls back to a 64-bit
// move plus shift.
void MacroAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
  if (imm8 == 0) {
    // Lane 0 is just the low 32 bits.
    Movd(dst, src);
    return;
  }
  DCHECK_EQ(1, imm8);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pextrd(dst, src, imm8);
    return;
  }
  // No SSE4.1: copy the low 64 bits and shift lane 1 down into place.
  movq(dst, src);
  shrq(dst, Immediate(32));
}
3254
3255
// Inserts |src| into 32-bit lane |imm8| (0 or 1) of |dst|.
// Uses pinsrd with SSE4.1, otherwise emulates via the scratch XMM register.
void MacroAssembler::Pinsrd(XMMRegister dst, Register src, int8_t imm8) {
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pinsrd(dst, src, imm8);
    return;
  }
  Movd(kScratchDoubleReg, src);
  if (imm8 == 1) {
    // Interleave: dst lane1 <- scratch lane0, dst lane0 preserved.
    punpckldq(dst, kScratchDoubleReg);
  } else {
    DCHECK_EQ(0, imm8);
    // Replace only the low 32 bits of dst.
    Movss(dst, kScratchDoubleReg);
  }
}
3270
3271
// Memory-operand variant of Pinsrd above: inserts the 32-bit value at |src|
// into lane |imm8| (0 or 1) of |dst|.
void MacroAssembler::Pinsrd(XMMRegister dst, const Operand& src, int8_t imm8) {
  DCHECK(imm8 == 0 || imm8 == 1);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pinsrd(dst, src, imm8);
    return;
  }
  Movd(kScratchDoubleReg, src);
  if (imm8 == 1) {
    // Interleave: dst lane1 <- scratch lane0, dst lane0 preserved.
    punpckldq(dst, kScratchDoubleReg);
  } else {
    DCHECK_EQ(0, imm8);
    // Replace only the low 32 bits of dst.
    Movss(dst, kScratchDoubleReg);
  }
}
3287
3288
3289void MacroAssembler::Lzcntl(Register dst, Register src) {
3290 if (CpuFeatures::IsSupported(LZCNT)) {
3291 CpuFeatureScope scope(this, LZCNT);
3292 lzcntl(dst, src);
3293 return;
3294 }
3295 Label not_zero_src;
3296 bsrl(dst, src);
3297 j(not_zero, &not_zero_src, Label::kNear);
3298 Set(dst, 63); // 63^31 == 32
3299 bind(&not_zero_src);
3300 xorl(dst, Immediate(31)); // for x in [0..31], 31^x == 31 - x
3301}
3302
3303
// Memory-operand variant of Lzcntl: count leading zeros of the 32-bit value
// at |src|; bsr fallback yields 32 for a zero input.
void MacroAssembler::Lzcntl(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcntl(dst, src);
    return;
  }
  Label not_zero_src;
  bsrl(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 63);  // 63^31 == 32
  bind(&not_zero_src);
  xorl(dst, Immediate(31));  // for x in [0..31], 31^x == 31 - x
}
3317
3318
// 64-bit leading-zero count of |src| into |dst|; bsr fallback yields 64 for
// a zero input.
void MacroAssembler::Lzcntq(Register dst, Register src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsrq(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 127);  // 127^63 == 64
  bind(&not_zero_src);
  xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
}
3332
3333
// Memory-operand variant of Lzcntq: 64-bit leading-zero count of the value
// at |src|; bsr fallback yields 64 for a zero input.
void MacroAssembler::Lzcntq(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsrq(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 127);  // 127^63 == 64
  bind(&not_zero_src);
  xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
}
3347
3348
// 64-bit trailing-zero count of |src| into |dst|; bsf fallback yields 64
// for a zero input.
void MacroAssembler::Tzcntq(Register dst, Register src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsfq(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
  Set(dst, 64);
  bind(&not_zero_src);
}
3362
3363
// Memory-operand variant of Tzcntq: 64-bit trailing-zero count of the value
// at |src|; bsf fallback yields 64 for a zero input.
void MacroAssembler::Tzcntq(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsfq(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
  Set(dst, 64);
  bind(&not_zero_src);
}
3377
3378
3379void MacroAssembler::Tzcntl(Register dst, Register src) {
3380 if (CpuFeatures::IsSupported(BMI1)) {
3381 CpuFeatureScope scope(this, BMI1);
3382 tzcntl(dst, src);
3383 return;
3384 }
3385 Label not_zero_src;
3386 bsfl(dst, src);
3387 j(not_zero, &not_zero_src, Label::kNear);
3388 Set(dst, 32); // The result of tzcnt is 32 if src = 0.
3389 bind(&not_zero_src);
3390}
3391
3392
// Memory-operand variant of Tzcntl: 32-bit trailing-zero count of the value
// at |src|; bsf fallback yields 32 for a zero input.
void MacroAssembler::Tzcntl(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcntl(dst, src);
    return;
  }
  Label not_zero_src;
  bsfl(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 32);  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}
3405
3406
3407void MacroAssembler::Popcntl(Register dst, Register src) {
3408 if (CpuFeatures::IsSupported(POPCNT)) {
3409 CpuFeatureScope scope(this, POPCNT);
3410 popcntl(dst, src);
3411 return;
3412 }
3413 UNREACHABLE();
3414}
3415
3416
// Memory-operand variant of Popcntl; requires POPCNT hardware support.
void MacroAssembler::Popcntl(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcntl(dst, src);
    return;
  }
  // No software fallback is provided.
  UNREACHABLE();
}
3425
3426
// 64-bit population count of |src| into |dst|; requires POPCNT support.
void MacroAssembler::Popcntq(Register dst, Register src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcntq(dst, src);
    return;
  }
  // No software fallback is provided.
  UNREACHABLE();
}
3435
3436
// Memory-operand variant of Popcntq; requires POPCNT hardware support.
void MacroAssembler::Popcntq(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcntq(dst, src);
    return;
  }
  // No software fallback is provided.
  UNREACHABLE();
}
3445
3446
// Pushes all safepoint-saved general registers (see
// kSafepointPushRegisterIndices) and reserves stack slots for the
// non-pushed registers so the frame layout matches kNumSafepointRegisters.
void MacroAssembler::Pushad() {
  Push(rax);
  Push(rcx);
  Push(rdx);
  Push(rbx);
  // Not pushing rsp or rbp.
  Push(rsi);
  Push(rdi);
  Push(r8);
  Push(r9);
  // r10 is kScratchRegister.
  Push(r11);
  Push(r12);
  // r13 is kRootRegister.
  Push(r14);
  Push(r15);
  STATIC_ASSERT(12 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, -sp_delta));
}
3469
3470
// Restores the registers saved by Pushad, in reverse order.
void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, sp_delta));
  Pop(r15);
  Pop(r14);
  Pop(r12);
  Pop(r11);
  Pop(r9);
  Pop(r8);
  Pop(rdi);
  Pop(rsi);
  Pop(rbx);
  Pop(rdx);
  Pop(rcx);
  Pop(rax);
}
3489
3490
// Discards the whole Pushad frame without restoring any registers.
void MacroAssembler::Dropad() {
  addp(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}
3494
3495
// Order general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14, r15.
// (-1 marks registers that are not pushed; the index is the register's
// slot in the safepoint frame.)
const int
MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,   // rax
    1,   // rcx
    2,   // rdx
    3,   // rbx
    -1,  // rsp -- not pushed
    -1,  // rbp -- not pushed
    4,   // rsi
    5,   // rdi
    6,   // r8
    7,   // r9
    -1,  // r10 -- kScratchRegister
    8,   // r11
    9,   // r12
    -1,  // r13 -- kRootRegister
    10,  // r14
    11   // r15
};
3517
3518
// Stores an immediate into |dst|'s slot in the safepoint register frame.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
                                                  const Immediate& imm) {
  movp(SafepointRegisterSlot(dst), imm);
}
3523
3524
// Stores |src| into |dst|'s slot in the safepoint register frame.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movp(SafepointRegisterSlot(dst), src);
}
3528
3529
// Loads |src|'s safepoint frame slot into |dst|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movp(dst, SafepointRegisterSlot(src));
}
3533
3534
// Returns the stack operand addressing |reg|'s slot in the safepoint frame
// laid out by Pushad.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
3538
3539
// Pushes a new stack handler (a single next-pointer) and links it into the
// isolate's handler chain.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Push(ExternalOperand(handler_address));

  // Set this new handler as the current one.
  movp(ExternalOperand(handler_address), rsp);
}
3552
3553
// Unlinks the topmost stack handler and drops its frame from the stack.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  // Restore the previous handler as the current one.
  Pop(ExternalOperand(handler_address));
  addp(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
3560
3561
// Plain return; pops nothing besides the return address.
void MacroAssembler::Ret() {
  ret(0);
}
3565
3566
// Returns and drops |bytes_dropped| bytes of arguments. The ret-imm16
// encoding only covers 16 bits, so larger drops adjust rsp manually using
// |scratch| to preserve the return address.
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    PopReturnAddressTo(scratch);
    addp(rsp, Immediate(bytes_dropped));
    PushReturnAddressFrom(scratch);
    ret(0);
  }
}
3577
3578
// Compares the two top x87 stack values into EFLAGS and pops both.
void MacroAssembler::FCmp() {
  fucomip();  // Compare st(0) with st(1), set EFLAGS, pop st(0).
  fstp(0);    // Pop the remaining operand.
}
3583
3584
// Compares |heap_object|'s instance type against |type|; also leaves the
// object's map in |map| for further use by the caller.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
3591
3592
// Compares the instance-type byte of |map| against |type|.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}
3597
3598
// Jumps to |fail| unless |map|'s elements kind is one of the fast kinds
// (smi/object, packed or holey). Relies on the fast kinds being the lowest
// elements-kind values, asserted below.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
3610
3611
// Jumps to |fail| unless |map|'s elements kind is FAST_ELEMENTS or
// FAST_HOLEY_ELEMENTS (i.e. fast but not smi-only), using the kind ordering
// asserted below: smi kinds fail low, non-fast kinds fail high.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
3626
3627
// Jumps to |fail| unless |map|'s elements kind is FAST_SMI_ELEMENTS or
// FAST_HOLEY_SMI_ELEMENTS (the two lowest kinds, asserted below).
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
}
3637
3638
// Stores the number in |maybe_number| (a smi or a HeapNumber) into slot
// |index| of the FixedDoubleArray |elements|, as a double. Jumps to |fail|
// if the value is neither a smi nor a heap number. |xmm_scratch| is
// clobbered; kScratchRegister is clobbered on the smi path.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register index,
    XMMRegister xmm_scratch,
    Label* fail,
    int elements_offset) {
  Label smi_value, done;

  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, turn potential sNaN into qNaN.
  // (Multiplying by 1.0 quiets signalling NaNs without changing other
  // values.)
  Move(xmm_scratch, 1.0);
  mulsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  jmp(&done, Label::kNear);

  bind(&smi_value);
  // Value is a smi. convert to a double and store.
  // Preserve original value.
  SmiToInteger32(kScratchRegister, maybe_number);
  Cvtlsi2sd(xmm_scratch, kScratchRegister);
  bind(&done);
  Movsd(FieldOperand(elements, index, times_8,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        xmm_scratch);
}
3670
3671
// Compares |obj|'s map against the handle |map|, setting EFLAGS.
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
3675
3676
// Jumps to |fail| unless |obj| is a heap object with map |map|. With
// DO_SMI_CHECK a smi input also fails; with DONT_DO_SMI_CHECK the caller
// guarantees |obj| is not a smi.
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
3688
3689
// Clamps the signed 32-bit value in |reg| to the range [0, 255] in place.
// Values already in range are left untouched.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  testl(reg, Immediate(0xFFFFFF00));  // Already in [0, 255]?
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  decb(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
3698
3699
// Clamps the double in |input_reg| to an integer in [0, 255], written to
// |result_reg|. Out-of-range values saturate to 0 or 255; NaN maps to 0
// (it falls through the conversion-failure path and compares unordered).
// |temp_xmm_reg| is clobbered (zeroed for the final comparison).
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister temp_xmm_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  Xorpd(temp_xmm_reg, temp_xmm_reg);
  Cvtsd2si(result_reg, input_reg);
  testl(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);  // Already in [0, 255].
  // cvtsd2si returns 0x80000000 on failure; cmp with 1 overflows only then.
  cmpl(result_reg, Immediate(1));
  j(overflow, &conv_failure, Label::kNear);
  // Out of range but convertible: produce 0 for negative, 255 for positive.
  movl(result_reg, Immediate(0));
  setcc(sign, result_reg);
  subl(result_reg, Immediate(1));
  andl(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  Set(result_reg, 0);
  Ucomisd(input_reg, temp_xmm_reg);
  j(below, &done, Label::kNear);  // Negative (or NaN stays 0 via unordered).
  Set(result_reg, 255);
  bind(&done);
}
3723
3724
// Converts the zero-extended uint32 in |src| to a double in |dst| using a
// 64-bit signed conversion (safe because the upper 32 bits are zero, which
// debug code verifies).
void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src) {
  if (FLAG_debug_code) {
    cmpq(src, Immediate(0xffffffff));
    Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
  }
  Cvtqsi2sd(dst, src);
}
3733
3734
// Calls the DoubleToIStub to truncate the double stored at
// [input_reg + offset] into |result_reg| (slow path for inputs the inline
// cvttsd2si sequence cannot handle).
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
3741
3742
// Truncates the HeapNumber in |input_reg| to a 32-bit integer in
// |result_reg|, falling back to the DoubleToIStub when the fast cvttsd2si
// conversion overflows. kScratchDoubleReg is clobbered.
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done;
  Movsd(kScratchDoubleReg, FieldOperand(input_reg, HeapNumber::kValueOffset));
  Cvttsd2siq(result_reg, kScratchDoubleReg);
  // On overflow cvttsd2si produces 0x8000000000000000; cmp with 1 then
  // sets the overflow flag exactly in that case.
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  // Slow case.
  if (input_reg.is(result_reg)) {
    // The stub would clobber its own input; spill the value to the stack.
    subp(rsp, Immediate(kDoubleSize));
    Movsd(MemOperand(rsp, 0), kScratchDoubleReg);
    SlowTruncateToI(result_reg, rsp, 0);
    addp(rsp, Immediate(kDoubleSize));
  } else {
    SlowTruncateToI(result_reg, input_reg);
  }

  bind(&done);
  // Keep our invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}
3765
3766
// Truncates the double in |input_reg| to a 32-bit integer in |result_reg|,
// spilling to the stack and calling the DoubleToIStub when the fast
// cvttsd2si conversion overflows.
void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  Cvttsd2siq(result_reg, input_reg);
  // On overflow cvttsd2si produces 0x8000000000000000; cmp with 1 then
  // sets the overflow flag exactly in that case.
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  subp(rsp, Immediate(kDoubleSize));
  Movsd(MemOperand(rsp, 0), input_reg);
  SlowTruncateToI(result_reg, rsp, 0);
  addp(rsp, Immediate(kDoubleSize));

  bind(&done);
  // Keep our invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}
3783
3784
// Converts the double in |input_reg| to a 32-bit integer in |result_reg|,
// branching to |lost_precision| if the value is not exactly representable,
// to |is_nan| for NaN, and (with FAIL_ON_MINUS_ZERO) to |minus_zero| for
// -0.0. kScratchDoubleReg is clobbered by the round-trip check.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  Cvttsd2si(result_reg, input_reg);
  // Convert back and compare with the original to detect precision loss.
  Cvtlsi2sd(kScratchDoubleReg, result_reg);
  Ucomisd(kScratchDoubleReg, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    testl(result_reg, result_reg);
    j(not_zero, &done, Label::kNear);
    Movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to minus_zero.
    andl(result_reg, Immediate(1));
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}
3810
3811
// Loads |map|'s descriptor array into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movp(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
3816
3817
// Extracts the number-of-own-descriptors field from |map|'s bit field 3
// into |dst| (as an untagged integer).
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
3822
3823
// Extracts |map|'s enum-cache length from bit field 3 into |dst| as a smi.
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  andl(dst, Immediate(Map::EnumLengthBits::kMask));
  Integer32ToSmi(dst, dst);
}
3830
3831
// Loads the getter or setter function stored at |accessor_index| in
// |holder|'s descriptor array into |dst|. Clobbers |dst| as scratch along
// the way (map -> descriptors -> AccessorPair -> accessor).
void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  movp(dst, FieldOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  movp(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  movp(dst, FieldOperand(dst, offset));
}
3842
3843
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003844void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
3845 Register scratch2, Handle<WeakCell> cell,
3846 Handle<Code> success,
3847 SmiCheckType smi_check_type) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003848 Label fail;
3849 if (smi_check_type == DO_SMI_CHECK) {
3850 JumpIfSmi(obj, &fail);
3851 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003852 movq(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
3853 CmpWeakValue(scratch1, cell, scratch2);
Ben Murdoch257744e2011-11-30 15:57:28 +00003854 j(equal, success, RelocInfo::CODE_TARGET);
Ben Murdoch257744e2011-11-30 15:57:28 +00003855 bind(&fail);
3856}
3857
3858
// Debug-mode check that |object| is a smi or a HeapNumber; aborts
// otherwise. No-op unless emit_debug_code().
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    Condition is_smi = CheckSmi(object);
    j(is_smi, &ok, Label::kNear);
    Cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandIsNotANumber);
    bind(&ok);
  }
}
3870
// Debug-mode check that |object| is neither a smi nor a HeapNumber; aborts
// otherwise. No-op unless emit_debug_code().
void MacroAssembler::AssertNotNumber(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(NegateCondition(is_smi), kOperandIsANumber);
    Cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(not_equal, kOperandIsANumber);
  }
}
Andrei Popescu402d9372010-02-26 13:31:12 +00003880
// Debug-mode check that |object| is not a smi; no-op unless
// emit_debug_code().
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(NegateCondition(is_smi), kOperandIsASmi);
  }
}
3887
3888
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003889void MacroAssembler::AssertSmi(Register object) {
3890 if (emit_debug_code()) {
3891 Condition is_smi = CheckSmi(object);
3892 Check(is_smi, kOperandIsNotASmi);
3893 }
Steve Block44f0eee2011-05-26 01:26:41 +01003894}
3895
3896
// Memory-operand variant: debug-mode check that the value at |object| is a
// smi; no-op unless emit_debug_code().
void MacroAssembler::AssertSmi(const Operand& object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, kOperandIsNotASmi);
  }
}
3903
3904
// Debug-mode check that the upper 32 bits of |int32_register| are zero
// (i.e. the register holds a properly zero-extended 32-bit value).
void MacroAssembler::AssertZeroExtended(Register int32_register) {
  if (emit_debug_code()) {
    DCHECK(!int32_register.is(kScratchRegister));
    // 2^32: any zero-extended 32-bit value is strictly below this.
    movq(kScratchRegister, V8_INT64_C(0x0000000100000000));
    cmpq(kScratchRegister, int32_register);
    Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
  }
}
3913
3914
// Debug-mode check that |object| is a string (not a smi, instance type
// below FIRST_NONSTRING_TYPE). |object| is preserved via push/pop.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    Pop(object);
    Check(below, kOperandIsNotAString);
  }
}
3926
3927
// Debug-mode check that |object| is a name (string or symbol; instance
// type <= LAST_NAME_TYPE). |object| is preserved via push/pop.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    Pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
3939
3940
// Debug-mode check that |object| is a JSFunction; |object| is preserved.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}
3951
3952
// Debug-mode check that |object| is a JSBoundFunction; |object| is
// preserved.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}
3963
// Debug-mode check that |object| is a JSGeneratorObject; |object| is
// preserved.
void MacroAssembler::AssertGeneratorObject(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAGeneratorObject);
    Push(object);
    CmpObjectType(object, JS_GENERATOR_OBJECT_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAGeneratorObject);
  }
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003974
// Debug-mode check that |object| is a JSReceiver (instance type >=
// FIRST_JS_RECEIVER_TYPE); |object| is preserved.
void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAReceiver);
    Push(object);
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, object);
    Pop(object);
    Check(above_equal, kOperandIsNotAReceiver);
  }
}
3986
3987
// Debug-mode check that |object| is either the undefined value or an
// AllocationSite (identified by its map in the first field).
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    Cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
3998}
3999
4000
4001void MacroAssembler::AssertRootValue(Register src,
4002 Heap::RootListIndex root_value_index,
4003 BailoutReason reason) {
4004 if (emit_debug_code()) {
4005 DCHECK(!src.is(kScratchRegister));
4006 LoadRoot(kScratchRegister, root_value_index);
4007 cmpp(src, kScratchRegister);
4008 Check(equal, reason);
4009 }
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004010}
4011
4012
4013
// Tests whether |heap_object| is a string; returns the condition that holds
// when it is. Leaves the map in |map| and the instance type in
// |instance_type| for further checks by the caller.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
4023
4024
// Tests whether |heap_object| is a name (instance type <= LAST_NAME_TYPE);
// returns the condition that holds when it is. Leaves map/instance type in
// the scratch registers for the caller.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, Immediate(static_cast<uint8_t>(LAST_NAME_TYPE)));
  return below_equal;
}
4033
4034
// Follows |map|'s constructor-or-back-pointer chain until a non-map value
// (the actual constructor, or a smi) is reached, leaving it in |result|.
// |temp| is clobbered by the type check.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  movp(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  // Still a map: this is a back pointer, keep walking.
  movp(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004047
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004048
// Loads the prototype of JSFunction |function| into |result|. Jumps to
// |miss| when the prototype slot holds the hole (i.e. no prototype has been
// created yet). Clobbers kScratchRegister.
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Label* miss) {
  // Get the prototype or initial map from the function.
  movp(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done: the slot
  // already held the prototype itself.
  Label done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done, Label::kNear);

  // Otherwise the slot held the initial map; get the prototype from it.
  movp(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
4072
4073
4074void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
4075 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01004076 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Ben Murdoch8b112d22011-06-08 16:22:53 +01004077 movl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00004078 }
4079}
4080
4081
4082void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004083 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004084 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01004085 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00004086 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01004087 incl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00004088 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01004089 addl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00004090 }
4091 }
4092}
4093
4094
4095void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004096 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004097 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01004098 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00004099 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01004100 decl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00004101 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01004102 subl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00004103 }
4104 }
4105}
4106
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004107
// Emits a debugger-statement break: calls Runtime::kHandleDebuggerStatement
// through the CEntry stub with zero arguments.
void MacroAssembler::DebugBreak() {
  Set(rax, 0);  // No arguments.
  LoadAddress(rbx,
              ExternalReference(Runtime::kHandleDebuggerStatement, isolate()));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  // DEBUGGER_STATEMENT reloc info lets the debugger find this call site.
  Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
Ben Murdoch257744e2011-11-30 15:57:28 +00004116
// Removes the current frame and shifts the callee's arguments (plus receiver
// and return address) down into the caller's frame, so a subsequent jump to
// the callee behaves as a proper tail call. On entry |caller_args_count_reg|
// holds the caller's argument count; it is clobbered, as are |scratch0| and
// |scratch1|. |ra_state| says whether the return address is already on the
// stack or must first be pushed. On exit rsp/rbp describe the caller's frame.
void MacroAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
                                        Register caller_args_count_reg,
                                        Register scratch0, Register scratch1,
                                        ReturnAddressState ra_state) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
#endif

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch0;
  if (callee_args_count.is_reg()) {
    subp(caller_args_count_reg, callee_args_count.reg());
    leap(new_sp_reg, Operand(rbp, caller_args_count_reg, times_pointer_size,
                             StandardFrameConstants::kCallerPCOffset));
  } else {
    leap(new_sp_reg, Operand(rbp, caller_args_count_reg, times_pointer_size,
                             StandardFrameConstants::kCallerPCOffset -
                                 callee_args_count.immediate() * kPointerSize));
  }

  if (FLAG_debug_code) {
    // The new stack pointer must lie above the current one.
    cmpp(rsp, new_sp_reg);
    Check(below, kStackAccessBelowStackPointer);
  }

  // Copy return address from caller's frame to current frame's return address
  // to avoid its trashing and let the following loop copy it to the right
  // place.
  Register tmp_reg = scratch1;
  if (ra_state == ReturnAddressState::kOnStack) {
    movp(tmp_reg, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
    movp(Operand(rsp, 0), tmp_reg);
  } else {
    DCHECK(ReturnAddressState::kNotOnStack == ra_state);
    Push(Operand(rbp, StandardFrameConstants::kCallerPCOffset));
  }

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // +2 here is to copy both receiver and return address.
  Register count_reg = caller_args_count_reg;
  if (callee_args_count.is_reg()) {
    leap(count_reg, Operand(callee_args_count.reg(), 2));
  } else {
    movp(count_reg, Immediate(callee_args_count.immediate() + 2));
    // TODO(ishell): Unroll copying loop for small immediate values.
  }

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  decp(count_reg);
  movp(tmp_reg, Operand(rsp, count_reg, times_pointer_size, 0));
  movp(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
  bind(&entry);
  cmpp(count_reg, Immediate(0));
  j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  movp(rsp, new_sp_reg);
}
Ben Murdoch257744e2011-11-30 15:57:28 +00004188
// Invokes |function| with |actual| arguments, deriving the expected argument
// count from the function's SharedFunctionInfo. Clobbers rbx (used to hold
// the formal parameter count). Delegates to the overload taking an explicit
// expected count.
void MacroAssembler::InvokeFunction(Register function,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  movp(rbx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  LoadSharedFunctionInfoSpecialField(
      rbx, rbx, SharedFunctionInfo::kFormalParameterCountOffset);

  ParameterCount expected(rbx);
  InvokeFunction(function, new_target, expected, actual, flag, call_wrapper);
}
4201
4202
// Invokes a statically-known |function|: materializes the handle into rdi
// (the register the register-based overload requires) and delegates, with
// no new.target.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(rdi, function);
  InvokeFunction(rdi, no_reg, expected, actual, flag, call_wrapper);
}
4211
4212
// Invokes |function| (which must already be in rdi) with explicit expected
// and actual argument counts. Loads the function's context into rsi before
// jumping to the shared invoke path.
void MacroAssembler::InvokeFunction(Register function,
                                    Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  DCHECK(function.is(rdi));
  movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
  InvokeFunctionCode(rdi, new_target, expected, actual, flag, call_wrapper);
}
4223
4224
// Core invoke path: handles debug step-in flooding, clears new.target when
// absent, emits the arguments-adaptor prologue for mismatched argument
// counts, then calls or jumps (per |flag|) through the function's code
// entry. |function| must be rdi and |new_target|, if valid, must be rdx
// (the registers the adaptor/stepping machinery expects).
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(rdi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(rdx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected,
                 actual,
                 &done,
                 &definitely_mismatches,
                 flag,
                 Label::kNear,
                 call_wrapper);
  // On a definite mismatch InvokePrologue already transferred control to the
  // adaptor trampoline, so no direct call is emitted here.
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
4269
4270
// Emits the argument-count check that precedes a function invocation.
// Sets rax to the actual argument count and, when expected != actual,
// routes the invocation through the ArgumentsAdaptorTrampoline (with the
// expected count in rbx). Sets *definitely_mismatches when the mismatch is
// known at compile time, in which case control never reaches |done| and the
// caller must not emit a direct call. Falls through (or jumps to |done|)
// when the counts match.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance near_jump,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    Set(rax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      Set(rax, actual.immediate());
      cmpp(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      DCHECK(expected.reg().is(rbx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpp(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      DCHECK(actual.reg().is(rax));
      DCHECK(expected.reg().is(rbx));
    } else {
      // Expected and actual are the same register: trivially a match, but
      // the actual count must still end up in rax.
      Move(rax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, near_jump);
      }
    } else {
      // Tail-call the adaptor; it will in turn tail-call the function.
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
4335
4336
// When the debugger's last step action is StepIn or stronger, calls
// Runtime::kDebugPrepareStepInIfStepping for |fun| so the debugger can
// flood it with breakpoints. All live invoke registers (expected/actual
// counts, new.target, the function) are saved around the runtime call;
// register-held counts are smi-tagged for the trip through the runtime and
// untagged afterwards. Push and pop order must mirror each other exactly.
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(isolate());
  Operand last_step_action_operand = ExternalOperand(last_step_action);
  // The comparison below assumes StepFrame implies stepping too.
  STATIC_ASSERT(StepFrame > StepIn);
  cmpb(last_step_action_operand, Immediate(StepIn));
  j(less, &skip_flooding);
  {
    // Only enter a new frame if we are not already inside one.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      Integer32ToSmi(expected.reg(), expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      Integer32ToSmi(actual.reg(), actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // Pushed twice: once as the preserved value, once as the runtime-call
    // argument (which the call consumes).
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiToInteger64(actual.reg(), actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiToInteger64(expected.reg(), expected.reg());
    }
  }
  bind(&skip_flooding);
}
4379
// Emits the prologue for a stub frame: saves the caller's frame pointer,
// establishes the new one, and pushes the frame-type marker.
void MacroAssembler::StubPrologue(StackFrame::Type type) {
  pushq(rbp);  // Caller's frame pointer.
  movp(rbp, rsp);
  Push(Smi::FromInt(type));  // Frame-type marker.
}
4385
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004386void MacroAssembler::Prologue(bool code_pre_aging) {
4387 PredictableCodeSizeScope predictible_code_size_scope(this,
4388 kNoCodeAgeSequenceLength);
4389 if (code_pre_aging) {
4390 // Pre-age the code.
4391 Call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
4392 RelocInfo::CODE_AGE_SEQUENCE);
4393 Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
4394 } else {
4395 pushq(rbp); // Caller's frame pointer.
4396 movp(rbp, rsp);
4397 Push(rsi); // Callee's context.
4398 Push(rdi); // Callee's JS function.
4399 }
4400}
4401
4402
// Loads the current function's type feedback vector into |vector|, walking
// function -> literals array -> feedback vector via the frame's function
// slot.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  movp(vector, FieldOperand(vector, JSFunction::kLiteralsOffset));
  movp(vector, FieldOperand(vector, LiteralsArray::kFeedbackVectorOffset));
}
4408
4409
// Overload taking a constant-pool flag; x64 has no out-of-line constant
// pool, so this must never be called (use the single-argument EnterFrame).
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on x64.
  UNREACHABLE();
}
4415
4416
// Emits a frame prologue for |type|: saves rbp, establishes the new frame
// pointer, pushes the type marker, and for INTERNAL frames also pushes the
// code object. In debug code, verifies the code-object slot has been
// patched away from the placeholder undefined value.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  pushq(rbp);
  movp(rbp, rsp);
  Push(Smi::FromInt(type));
  if (type == StackFrame::INTERNAL) {
    Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
    Push(kScratchRegister);
  }
  if (emit_debug_code()) {
    Move(kScratchRegister,
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpp(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
4433
4434
// Emits a frame epilogue: in debug code, checks the frame-type marker on
// the stack matches |type|, then tears the frame down by restoring rsp and
// rbp.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
         kScratchRegister);
    Check(equal, kStackFrameTypesMustMatch);
  }
  movp(rsp, rbp);
  popq(rbp);
}
4445
4446
// Builds the fixed part of an exit frame (the frame used when calling from
// JS into C++): frame pointer, EXIT type marker, a slot for the entry stack
// pointer (patched later), and the code object. Also publishes rbp, the
// context, and the C function into the isolate's top-of-stack external
// references. When |save_rax| is set, rax is preserved in callee-saved r14
// (used later by EnterExitFrame to compute argv).
void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  DCHECK_EQ(kFPOnStackSize + kPCOnStackSize,
            ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(kFPOnStackSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  pushq(rbp);
  movp(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  Push(Smi::FromInt(StackFrame::EXIT));
  DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
  Push(Immediate(0));  // Saved entry sp, patched before call.
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  Push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movp(r14, rax);  // Backup rax in callee-save register.
  }

  Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
  Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
  Store(ExternalReference(Isolate::kCFunctionAddress, isolate()), rbx);
}
Steve Blocka7e24c12009-10-30 11:49:00 +00004473
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004474
// Finishes exit-frame construction: reserves |arg_stack_space| slots of
// outgoing C argument space (plus Win64 shadow space), optionally spills all
// allocatable XMM registers below the fixed frame, aligns rsp to the OS
// frame alignment, and patches the saved entry-sp slot with the final rsp.
void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  // The Windows x64 ABI requires 32 bytes of shadow space for the callee.
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
                arg_stack_space * kRegisterSize;
    subp(rsp, Immediate(space));
    int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    const RegisterConfiguration* config = RegisterConfiguration::Crankshaft();
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      Movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subp(rsp, Immediate(arg_stack_space * kRegisterSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    DCHECK(is_int8(kFrameAlignment));
    andp(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
4508
4509
// Enters a full exit frame for a call into the C runtime. Expects the
// argument count in rax (preserved into r14 by the prologue) and computes
// argv into callee-saved r15 for use by LeaveExitFrame.
void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  leap(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}
4520
4521
// Enters an exit frame for an API (callback) call: like EnterExitFrame but
// without preserving rax/computing argv and without saving double registers.
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}
4526
4527
// Tears down an exit frame created by EnterExitFrame. Optionally restores
// the spilled XMM registers, and either drops the JS arguments and receiver
// (using argv in r15, set up by EnterExitFrame) or simply leaves the frame.
// Always restores the context from the isolate afterwards.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    // Reload the XMM registers from the same slots EnterExitFrameEpilogue
    // stored them to.
    int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    const RegisterConfiguration* config = RegisterConfiguration::Crankshaft();
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      Movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    movp(rcx, Operand(rbp, kFPOnStackSize));
    movp(rbp, Operand(rbp, 0 * kPointerSize));

    // Drop everything up to and including the arguments and the receiver
    // from the caller stack.
    leap(rsp, Operand(r15, 1 * kPointerSize));

    PushReturnAddressFrom(rcx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
4558
4559
// Tears down an API exit frame created by EnterApiExitFrame; optionally
// restores the context from the isolate.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  movp(rsp, rbp);
  popq(rbp);

  LeaveExitFrameEpilogue(restore_context);
}
4566
4567
// Shared exit-frame teardown tail: optionally restores rsi from the
// isolate's saved context, clears the saved context in debug builds, and
// clears the isolate's c-entry frame pointer to mark that we have left
// C++-called code.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  Operand context_operand = ExternalOperand(context_address);
  if (restore_context) {
    movp(rsi, context_operand);
  }
#ifdef DEBUG
  movp(context_operand, Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movp(c_entry_fp_operand, Immediate(0));
}
4585
4586
// Security check for access to a global proxy: jumps to |miss| unless the
// currently executing context and the native context of |holder_reg| are
// the same context or share the same security token. Clobbers |scratch| and
// kScratchRegister.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!scratch.is(kScratchRegister));
  // Load current lexical context from the active StandardFrame, which
  // may require crawling past STUB frames.
  Label load_context;
  Label has_context;
  movp(scratch, rbp);
  bind(&load_context);
  DCHECK(SmiValuesAre32Bits());
  // This is "JumpIfNotSmi" but without loading the value into a register.
  // A smi in the marker slot means this is a STUB frame with no context;
  // keep walking the caller-FP chain until a frame with a context is found.
  cmpl(MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset),
       Immediate(0));
  j(not_equal, &has_context);
  movp(scratch, MemOperand(scratch, CommonFrameConstants::kCallerFPOffset));
  jmp(&load_context);
  bind(&has_context);
  movp(scratch,
       MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmpp(scratch, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  movp(scratch, ContextOperand(scratch, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmpp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Preserve original value of holder_reg.
    Push(holder_reg);
    movp(holder_reg,
         FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    movp(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
    Pop(holder_reg);
  }

  // Compare the security-token slots of the two native contexts.
  movp(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movp(scratch, FieldOperand(scratch, token_offset));
  cmpp(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
4661
4662
// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stubs-hydrogen.cc.
// Emits code that scrambles the untagged integer key in |r0| into a
// 30-bit hash value (left in |r0|), using |scratch| as a temporary.
// The instruction sequence implements exactly the integer-hash recipe
// spelled out in the step comments below; it must not be reordered, as
// it has to stay in sync with ComputeIntegerHash in utils.h.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiToInteger32(scratch, scratch);

  // Xor original key with a seed.
  xorl(r0, scratch);

  // Compute the hash code from the untagged key.  This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  movl(scratch, r0);
  notl(r0);
  shll(scratch, Immediate(15));
  addl(r0, scratch);
  // hash = hash ^ (hash >> 12);
  movl(scratch, r0);
  shrl(scratch, Immediate(12));
  xorl(r0, scratch);
  // hash = hash + (hash << 2);
  leal(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  movl(scratch, r0);
  shrl(scratch, Immediate(4));
  xorl(r0, scratch);
  // hash = hash * 2057;
  imull(r0, r0, Immediate(2057));
  // hash = hash ^ (hash >> 16);
  movl(scratch, r0);
  shrl(scratch, Immediate(16));
  xorl(r0, scratch);
  // Mask down to 30 bits so the result always fits in a Smi.
  andl(r0, Immediate(0x3fffffff));
}
4700
4701
4702
// Emits an inline lookup of |key| in a SeededNumberDictionary held in
// |elements|.  On success the value is left in |result|; on a missing key
// or a non-field property, control jumps to |miss|.  The probe loop is
// unrolled kNumberDictionaryProbes times.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeded.
  //          Allowed to be the same as 'key' or 'result'.
  //          Unchanged on bailout so 'key' or 'result' can be used
  //          in further computation.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask (capacity is a power of two, so capacity - 1
  // is an all-ones mask).
  SmiToInteger32(r1, FieldOperand(elements,
                                  SeededNumberDictionary::kCapacityOffset));
  decl(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    movp(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    andp(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    leap(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmpp(key, FieldOperand(elements,
                           r2,
                           times_pointer_size,
                           SeededNumberDictionary::kElementsStartOffset));
    // All but the last probe fall through to the next probe on a mismatch;
    // the last probe bails out to |miss| instead.
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property (details word, the third slot
  // of the entry, must have property type DATA == 0).
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(DATA, 0);
  Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Smi::FromInt(PropertyDetails::TypeField::kMask));
  j(not_zero, miss);

  // Get the value at the masked, scaled index (second slot of the entry).
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  movp(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
4780
4781
// Loads the current new-space (or, depending on |flags|, old-space)
// allocation top into |result|.  If RESULT_CONTAINS_TOP is set, |result|
// already holds the top and this only emits a debug check.  When |scratch|
// is valid, the *address* of the top pointer is kept in |scratch| so that
// UpdateAllocationTopHelper can reuse it later.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    Operand top_operand = ExternalOperand(allocation_top);
    cmpp(result, top_operand);
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    LoadAddress(scratch, allocation_top);
    movp(result, Operand(scratch, 0));
  } else {
    Load(result, allocation_top);
  }
}
4810
4811
// Ensures the allocation pointer in |result| is double-aligned.  On 64-bit
// targets where pointers are already double-sized this is only a debug
// check; on 32-bit-pointer configurations (x32) a one-pointer filler is
// stored and |result| is bumped by half a double when misaligned.
void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
                                                 Register scratch,
                                                 Label* gc_required,
                                                 AllocationFlags flags) {
  if (kPointerSize == kDoubleSize) {
    // Pointers are double-sized, so every object start is double-aligned;
    // just verify that in debug builds.
    if (FLAG_debug_code) {
      testl(result, Immediate(kDoubleAlignmentMask));
      Check(zero, kAllocationIsNotDoubleAligned);
    }
  } else {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerSize * 2 == kDoubleSize);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    // Make sure scratch is not clobbered by this function as it might be
    // used in UpdateAllocationTopHelper later.
    DCHECK(!scratch.is(kScratchRegister));
    Label aligned;
    testl(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    // For pretenured (non-folded) allocations the filler store is not
    // known to be below the limit, so check against the limit first.
    if (((flags & ALLOCATION_FOLDED) == 0) && ((flags & PRETENURE) != 0)) {
      ExternalReference allocation_limit =
          AllocationUtils::GetAllocationLimitReference(isolate(), flags);
      cmpp(result, ExternalOperand(allocation_limit));
      j(above_equal, gc_required);
    }
    LoadRoot(kScratchRegister, Heap::kOnePointerFillerMapRootIndex);
    movp(Operand(result, 0), kScratchRegister);
    addp(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }
}
4844
4845
// Writes |result_end| back as the new allocation top.  If |scratch| is
// valid it must already hold the address of the top pointer (as arranged
// by LoadAllocationTopHelper); otherwise the external reference is stored
// to directly.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    // The new top must be object-aligned.
    testp(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top.
  if (scratch.is_valid()) {
    // Scratch already contains address of allocation top.
    movp(Operand(scratch, 0), result_end);
  } else {
    Store(allocation_top, result_end);
  }
}
4865
4866
// Allocates a fixed-size object of |object_size| bytes.  On success
// |result| holds the tagged object pointer; on failure control jumps to
// |gc_required|.  |result_end| (if valid) receives the untagged end of the
// object; |scratch| (if valid) caches the allocation-top address.
// With FLAG_inline_new off, this always jumps to |gc_required|.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
  }

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Without a valid result_end, the top is computed in place in |result|.
  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    movp(top_reg, result);
  }
  addp(top_reg, Immediate(object_size));
  Operand limit_operand = ExternalOperand(allocation_limit);
  cmpp(top_reg, limit_operand);
  j(above, gc_required);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(top_reg, scratch, flags);
  }

  if (top_reg.is(result)) {
    // |result| currently holds the end; move it back to the start and tag.
    subp(result, Immediate(object_size - kHeapObjectTag));
  } else {
    // Tag the result.
    DCHECK(kHeapObjectTag == 1);
    incp(result);
  }
}
4926
4927
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004928void MacroAssembler::Allocate(int header_size,
4929 ScaleFactor element_size,
4930 Register element_count,
4931 Register result,
4932 Register result_end,
4933 Register scratch,
4934 Label* gc_required,
4935 AllocationFlags flags) {
4936 DCHECK((flags & SIZE_IN_WORDS) == 0);
Ben Murdochc5610432016-08-08 18:44:38 +01004937 DCHECK((flags & ALLOCATION_FOLDING_DOMINATOR) == 0);
4938 DCHECK((flags & ALLOCATION_FOLDED) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004939 leap(result_end, Operand(element_count, element_size, header_size));
4940 Allocate(result_end, result, result_end, scratch, gc_required, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004941}
4942
4943
// Allocates an object whose byte size is held in register |object_size|.
// On success |result| holds the tagged object pointer and |result_end| the
// untagged end; on failure control jumps to |gc_required|.  |object_size|
// itself is preserved unless it aliases |result_end|.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
  }

  // Calculate new top (result + object_size) and bail out on overflow of
  // the space's limit.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  if (!object_size.is(result_end)) {
    movp(result_end, object_size);
  }
  addp(result_end, result);
  Operand limit_operand = ExternalOperand(allocation_limit);
  cmpp(result_end, limit_operand);
  j(above, gc_required);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(result_end, scratch, flags);
  }

  // Tag the result.
  addp(result, Immediate(kHeapObjectTag));
}
4992
Ben Murdochc5610432016-08-08 18:44:38 +01004993void MacroAssembler::FastAllocate(int object_size, Register result,
4994 Register result_end, AllocationFlags flags) {
4995 DCHECK(!result.is(result_end));
4996 // Load address of new object into result.
4997 LoadAllocationTopHelper(result, no_reg, flags);
4998
4999 if ((flags & DOUBLE_ALIGNMENT) != 0) {
5000 MakeSureDoubleAlignedHelper(result, no_reg, NULL, flags);
5001 }
5002
5003 leap(result_end, Operand(result, object_size));
5004
5005 UpdateAllocationTopHelper(result_end, no_reg, flags);
5006
5007 addp(result, Immediate(kHeapObjectTag));
5008}
5009
5010void MacroAssembler::FastAllocate(Register object_size, Register result,
5011 Register result_end, AllocationFlags flags) {
5012 DCHECK(!result.is(result_end));
5013 // Load address of new object into result.
5014 LoadAllocationTopHelper(result, no_reg, flags);
5015
5016 if ((flags & DOUBLE_ALIGNMENT) != 0) {
5017 MakeSureDoubleAlignedHelper(result, no_reg, NULL, flags);
5018 }
5019
5020 leap(result_end, Operand(result, object_size, times_1, 0));
5021
5022 UpdateAllocationTopHelper(result_end, no_reg, flags);
5023
5024 addp(result, Immediate(kHeapObjectTag));
5025}
Steve Blocka7e24c12009-10-30 11:49:00 +00005026
Steve Block3ce2e202009-11-05 08:53:23 +00005027void MacroAssembler::AllocateHeapNumber(Register result,
5028 Register scratch,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005029 Label* gc_required,
5030 MutableMode mode) {
Steve Block3ce2e202009-11-05 08:53:23 +00005031 // Allocate heap number in new space.
Ben Murdochc5610432016-08-08 18:44:38 +01005032 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required,
5033 NO_ALLOCATION_FLAGS);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005034
5035 Heap::RootListIndex map_index = mode == MUTABLE
5036 ? Heap::kMutableHeapNumberMapRootIndex
5037 : Heap::kHeapNumberMapRootIndex;
Steve Block3ce2e202009-11-05 08:53:23 +00005038
5039 // Set the map.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005040 LoadRoot(kScratchRegister, map_index);
5041 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block3ce2e202009-11-05 08:53:23 +00005042}
5043
5044
// Allocates a SeqTwoByteString with |length| characters (length is an
// untagged integer), initializing map, length, and hash field.  Jumps to
// |gc_required| on allocation failure.  scratch1-3 are clobbered.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  leap(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  // Round down to the alignment boundary, then undo the header slack that
  // was folded into the LEA above.
  andp(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subp(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize, times_1, scratch1, result, scratch2,
           scratch3, gc_required, NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movp(FieldOperand(result, String::kLengthOffset), scratch1);
  movp(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
5076
5077
// Allocates a SeqOneByteString with |length| characters (length is an
// untagged integer), initializing map, length, and hash field.  Jumps to
// |gc_required| on allocation failure.  scratch1-3 are clobbered.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  const int kHeaderAlignment = SeqOneByteString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  DCHECK(kCharSize == 1);
  addp(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  // Round down to the alignment boundary, then undo the header slack
  // added above.
  andp(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subp(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize, times_1, scratch1, result, scratch2,
           scratch3, gc_required, NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kOneByteStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movp(FieldOperand(result, String::kLengthOffset), scratch1);
  movp(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
5106
5107
// Allocates a two-byte ConsString cell and installs its map; the first,
// second, length, and hash fields are left for the caller to initialize.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string in new space.  (The previous comment incorrectly
  // said "heap number".)
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
5120
5121
// Allocates a one-byte ConsString cell and installs its map; the first,
// second, length, and hash fields are left for the caller to initialize.
void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsOneByteStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
5133
5134
// Allocates a two-byte SlicedString cell and installs its map; the parent,
// offset, length, and hash fields are left for the caller to initialize.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.  (The previous comment incorrectly
  // said "heap number".)
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
5147
5148
// Allocates a one-byte SlicedString cell and installs its map; the parent,
// offset, length, and hash fields are left for the caller to initialize.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.  (The previous comment incorrectly
  // said "heap number".)
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kSlicedOneByteStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
5161
5162
// Allocates and fully initializes a JSValue wrapper object holding |value|,
// using |constructor|'s initial map.  Jumps to |gc_required| on allocation
// failure.  |scratch| is clobbered; |result| must not alias the other
// registers (checked below).
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required,
           NO_ALLOCATION_FLAGS);

  // Initialize the JSValue: map from the constructor's initial map, empty
  // properties/elements, and the wrapped value.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  movp(FieldOperand(result, HeapObject::kMapOffset), scratch);
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  movp(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  movp(FieldOperand(result, JSObject::kElementsOffset), scratch);
  movp(FieldOperand(result, JSValue::kValueOffset), value);
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
5183
5184
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Destination is incremented by length, source, length and scratch are
// clobbered.
// A simpler loop is faster on small copies, but slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
// |min_length| is a static lower bound on the runtime length; it selects
// which of the fast paths below can be skipped at code-generation time.
void MacroAssembler::CopyBytes(Register destination,
                               Register source,
                               Register length,
                               int min_length,
                               Register scratch) {
  DCHECK(min_length >= 0);
  if (emit_debug_code()) {
    cmpl(length, Immediate(min_length));
    Assert(greater_equal, kInvalidMinLength);
  }
  Label short_loop, len8, len16, len24, done, short_string;

  const int kLongStringLimit = 4 * kPointerSize;
  if (min_length <= kLongStringLimit) {
    // Lengths below one word go through the byte-at-a-time loop.
    cmpl(length, Immediate(kPointerSize));
    j(below, &short_string, Label::kNear);
  }

  // rep movs below requires the fixed string-op registers.
  DCHECK(source.is(rsi));
  DCHECK(destination.is(rdi));
  DCHECK(length.is(rcx));

  if (min_length <= kLongStringLimit) {
    // Dispatch small word-multiples (1..4 words) to unrolled copies;
    // note the cascade falls through from len24 to len16 to len8.
    cmpl(length, Immediate(2 * kPointerSize));
    j(below_equal, &len8, Label::kNear);
    cmpl(length, Immediate(3 * kPointerSize));
    j(below_equal, &len16, Label::kNear);
    cmpl(length, Immediate(4 * kPointerSize));
    j(below_equal, &len24, Label::kNear);
  }

  // Because source is 8-byte aligned in our uses of this function,
  // we keep source aligned for the rep movs operation by copying the odd bytes
  // at the end of the ranges.
  movp(scratch, length);
  shrl(length, Immediate(kPointerSizeLog2));
  repmovsp();
  // Move remaining bytes of length.
  andl(scratch, Immediate(kPointerSize - 1));
  movp(length, Operand(source, scratch, times_1, -kPointerSize));
  movp(Operand(destination, scratch, times_1, -kPointerSize), length);
  addp(destination, scratch);

  if (min_length <= kLongStringLimit) {
    jmp(&done, Label::kNear);
    bind(&len24);
    movp(scratch, Operand(source, 2 * kPointerSize));
    movp(Operand(destination, 2 * kPointerSize), scratch);
    bind(&len16);
    movp(scratch, Operand(source, kPointerSize));
    movp(Operand(destination, kPointerSize), scratch);
    bind(&len8);
    movp(scratch, Operand(source, 0));
    movp(Operand(destination, 0), scratch);
    // Move remaining bytes of length (a possibly-overlapping word copy
    // ending exactly at source + length).
    movp(scratch, Operand(source, length, times_1, -kPointerSize));
    movp(Operand(destination, length, times_1, -kPointerSize), scratch);
    addp(destination, length);
    jmp(&done, Label::kNear);

    bind(&short_string);
    if (min_length == 0) {
      // Only a statically-possibly-empty copy needs the zero-length check.
      testl(length, length);
      j(zero, &done, Label::kNear);
    }

    bind(&short_loop);
    movb(scratch, Operand(source, 0));
    movb(Operand(destination, 0), scratch);
    incp(source);
    incp(destination);
    decl(length);
    j(not_zero, &short_loop, Label::kNear);
  }

  bind(&done);
}
5269
5270
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005271void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
5272 Register end_address,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005273 Register filler) {
5274 Label loop, entry;
Ben Murdoch097c5b22016-05-18 11:27:45 +01005275 jmp(&entry, Label::kNear);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005276 bind(&loop);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005277 movp(Operand(current_address, 0), filler);
5278 addp(current_address, Immediate(kPointerSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005279 bind(&entry);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005280 cmpp(current_address, end_address);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005281 j(below, &loop, Label::kNear);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005282}
5283
5284
// Loads into |dst| the context |context_chain_length| hops up the chain
// from the current context (rsi); a length of zero yields the current
// context itself.  In debug builds, verifies the result is not a
// with-context.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movp(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      movp(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in rsi).
    movp(dst, rsi);
  }

  // We should not have found a with context by walking the context
  // chain (i.e., the static scope chain and runtime context chain do
  // not agree). A variable occurring in such a scope should have
  // slot type LOOKUP and not CONTEXT.
  if (emit_debug_code()) {
    CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
                Heap::kWithContextMapRootIndex);
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
5309
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005310
// If |map_in_out| equals the native context's cached array map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match| with |map_in_out| unchanged.
// |scratch| is clobbered (it holds the native context).
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Only fast elements kinds have cached array maps in the native context.
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  movp(scratch, NativeContextOperand());
  cmpp(map_in_out,
       ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  movp(map_in_out,
       ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}
5330
5331
// Number of C-call arguments the platform ABI passes in registers; the
// remainder go on the stack (see ArgumentStackSlotsForCFunctionCall below).
#ifdef _WIN64
// Windows x64 calling convention: first four arguments in registers.
static const int kRegisterPassedArguments = 4;
#else
// AMD64 System V ABI (Linux/Mac): first six arguments in registers.
static const int kRegisterPassedArguments = 6;
#endif
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005337
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005338
// Loads the native context slot |index| into |dst|: first the native
// context itself, then the requested slot within it.
void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  movp(dst, NativeContextOperand());
  movp(dst, ContextOperand(dst, index));
}
5343
5344
// Loads the initial map of the global |function| into |map|. In debug mode,
// aborts if the loaded value is not a map (global functions are expected to
// always have an initial map).
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movp(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    // A map's own map is the meta map; anything else means the slot did not
    // hold an initial map.
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
5358
5359
Leon Clarke4515c472010-02-03 11:58:03 +00005360int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005361 // On Windows 64 stack slots are reserved by the caller for all arguments
5362 // including the ones passed in registers, and space is always allocated for
5363 // the four register arguments even if the function takes fewer than four
5364 // arguments.
5365 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
5366 // and the caller does not reserve stack slots for them.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005367 DCHECK(num_arguments >= 0);
Leon Clarke4515c472010-02-03 11:58:03 +00005368#ifdef _WIN64
Steve Block44f0eee2011-05-26 01:26:41 +01005369 const int kMinimumStackSlots = kRegisterPassedArguments;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005370 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
5371 return num_arguments;
Leon Clarke4515c472010-02-03 11:58:03 +00005372#else
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005373 if (num_arguments < kRegisterPassedArguments) return 0;
5374 return num_arguments - kRegisterPassedArguments;
Leon Clarke4515c472010-02-03 11:58:03 +00005375#endif
Leon Clarke4515c472010-02-03 11:58:03 +00005376}
5377
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005378
// Debug-check helper for sequential-string character stores: verifies that
// |string| is a heap object whose representation/encoding bits match
// |encoding_mask|, and that the untagged |index| is within [0, length).
// Aborts on any violation. |value| is used as a scratch register but is
// saved and restored around the type check; |index| is restored to its
// untagged form before returning.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  // A smi here would mean |string| is not a heap object at all.
  Label is_object;
  JumpIfNotSmi(string, &is_object);
  Abort(kNonObject);
  bind(&is_object);

  // Preserve |value| while we use it to inspect the instance type.
  Push(value);
  movp(value, FieldOperand(string, HeapObject::kMapOffset));
  movzxbp(value, FieldOperand(value, Map::kInstanceTypeOffset));

  // Only the representation and encoding bits are compared against the
  // caller-supplied mask.
  andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmpp(value, Immediate(encoding_mask));
  Pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  Integer32ToSmi(index, index);
  SmiCompare(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  SmiCompare(index, Smi::FromInt(0));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiToInteger32(index, index);
}
5410
5411
// Prepares the stack for a C call with |num_arguments| arguments: aligns
// rsp to the platform activation-frame alignment, reserves the required
// argument slots, and stashes the old rsp above them so CallCFunction can
// restore it afterwards. Clobbers kScratchRegister.
void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  DCHECK(frame_alignment != 0);
  DCHECK(num_arguments >= 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movp(kScratchRegister, rsp);
  DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  // One extra slot holds the saved rsp; the andp rounds rsp down to the
  // alignment boundary.
  subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
  andp(rsp, Immediate(-frame_alignment));
  movp(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister);
}
5426
5427
// Calls the C function at external reference |function|, loading its
// address into rax first. The stack must have been prepared with
// PrepareCallCFunction(num_arguments).
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}
5433
5434
// Calls the C function whose address is in |function| and then restores
// rsp from the slot PrepareCallCFunction saved it in. Must be paired with
// a preceding PrepareCallCFunction(num_arguments).
void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  DCHECK(base::OS::ActivationFrameAlignment() != 0);
  DCHECK(num_arguments >= 0);
  // The saved rsp sits just above the argument slots; reload it to undo
  // PrepareCallCFunction's alignment and reservation.
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
}
5449
Steve Blockd0582a62009-12-15 09:54:21 +00005450
#ifdef DEBUG
// Returns true if any two of the valid registers among the arguments are
// the same register. Invalid (no_reg) arguments are ignored, so callers can
// check fewer than eight registers.
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  const Register regs_to_check[] = {reg1, reg2, reg3, reg4,
                                    reg5, reg6, reg7, reg8};
  int valid_count = 0;
  RegList reg_bits = 0;
  for (const Register& reg : regs_to_check) {
    if (reg.is_valid()) {
      valid_count++;
      reg_bits |= reg.bit();
    }
  }
  // Aliased registers contribute two to the count but only one bit to the
  // set, so a mismatch means at least one pair aliases.
  return valid_count != NumRegs(reg_bits);
}
#endif
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005478
5479
// Sets up a scoped patcher that assembles exactly |size| bytes of new code
// directly over the instructions at |address|.
CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    : address_(address),
      size_(size),
      masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
5489
5490
// Flushes the instruction cache for the patched range and verifies that the
// patch filled exactly the reserved |size_| bytes.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
5499
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005500
// Tests the flag bits of the memory page containing |object| against |mask|
// and jumps to |condition_met| if the test satisfies |cc| (zero/not_zero).
// |scratch| is clobbered; it may alias |object|.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  // Round the object address down to its page start.
  if (scratch.is(object)) {
    andp(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    movp(scratch, Immediate(~Page::kPageAlignmentMask));
    andp(scratch, object);
  }
  // Use a byte-sized test when the mask fits in one byte; it covers the
  // low flag bits only.
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
5523
5524
// Jumps to |on_black| if the incremental-marking bits for |object| show the
// black pattern ("11"). Clobbers |bitmap_scratch|, |mask_scratch|, and rcx.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register bitmap_scratch,
                                 Register mask_scratch,
                                 Label* on_black,
                                 Label::Distance on_black_distance) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  // The mask_scratch register contains a 1 at the position of the first bit
  // and a 1 at a position of the second bit. All other positions are zero.
  movp(rcx, mask_scratch);
  // Both mask bits must be set in the bitmap cell for the object to be
  // black; masking and comparing back against the mask checks exactly that.
  andp(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  cmpp(mask_scratch, rcx);
  j(equal, on_black, on_black_distance);
}
5542
5543
// Computes the marking-bitmap location for |addr_reg|: |bitmap_reg| receives
// the address of the bitmap cell and |mask_reg| a two-bit mask ("11")
// shifted to the object's bit position within that cell. Clobbers rcx.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
  movp(bitmap_reg, addr_reg);
  // Sign extended 32 bit immediate.
  andp(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  // Index of the bitmap cell within the page's bitmap.
  movp(rcx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shrl(rcx, Immediate(shift));
  andp(rcx,
       Immediate((Page::kPageAlignmentMask >> shift) &
                 ~(Bitmap::kBytesPerCell - 1)));

  addp(bitmap_reg, rcx);
  // Bit index of the object within its bitmap cell.
  movp(rcx, addr_reg);
  shrl(rcx, Immediate(kPointerSizeLog2));
  andp(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
  // Shift the two-bit pattern into place (shlp_cl shifts by rcx's low byte).
  movl(mask_reg, Immediate(3));
  shlp_cl(mask_reg);
}
5566
5567
// Jumps to |value_is_white| if the marking bits for |value| show the white
// pattern ("00"). Clobbers |bitmap_scratch|, |mask_scratch|, and rcx (via
// GetMarkBits).
void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Label* value_is_white,
                                 Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
  j(zero, value_is_white, distance);
}
5585
5586
// Walks the prototype chain of the object in rax and jumps to |call_runtime|
// unless every map has a usable (and, past the receiver, empty) enum cache
// and every object has empty elements. Falls through when the fast
// for-in path can be used. Clobbers rbx, rcx, rdx, r8, and kScratchRegister.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  Register empty_fixed_array_value = r8;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  // rcx tracks the current object as we walk the chain, starting at the
  // receiver in rax.
  movp(rcx, rax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(kInvalidEnumCacheSentinel));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);

  movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(0));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register rcx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  cmpp(empty_fixed_array_value,
       FieldOperand(rcx, JSObject::kElementsOffset));
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  LoadRoot(kScratchRegister, Heap::kEmptySlowElementDictionaryRootIndex);
  cmpp(kScratchRegister, FieldOperand(rcx, JSObject::kElementsOffset));
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; null terminates the chain.
  movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  CompareRoot(rcx, Heap::kNullValueRootIndex);
  j(not_equal, &next);
}
5631
Ben Murdoch097c5b22016-05-18 11:27:45 +01005632
// Tests whether an AllocationMemento directly follows the JSArray in
// |receiver_reg|. Jumps to |no_memento_found| when no memento can be
// present (old space, page boundary, above allocation top). Otherwise falls
// through after the final CompareRoot; the caller is expected to branch on
// its result (equal => memento map found). Clobbers |scratch_reg|.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  // Offsets (untagged) of the would-be memento's map word and its end,
  // relative to the receiver.
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  leap(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  // XOR-ing with top leaves only differing bits; masking off the in-page
  // bits tests "same page as allocation top".
  xorp(scratch_reg, ExternalOperand(new_space_allocation_top));
  testp(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(zero, &top_check);
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  leap(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  xorp(scratch_reg, receiver_reg);
  testp(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(not_zero, no_memento_found);
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  leap(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  cmpp(scratch_reg, ExternalOperand(new_space_allocation_top));
  j(greater, no_memento_found);
  // Memento map check.
  bind(&map_check);
  CompareRoot(MemOperand(receiver_reg, kMementoMapOffset),
              Heap::kAllocationMementoMapRootIndex);
}
5672
5673
// Walks |object|'s prototype chain and jumps to |found| if any prototype is
// below JS_OBJECT_TYPE (proxy/value) or uses dictionary elements. Falls
// through when the chain terminates at null without such an object.
// |scratch0| and |scratch1| are clobbered.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!(scratch0.is(kScratchRegister) && scratch1.is(kScratchRegister)));
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again, end;

  // Start at the first prototype; a null prototype means an empty chain.
  movp(current, object);
  movp(current, FieldOperand(current, HeapObject::kMapOffset));
  movp(current, FieldOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  j(equal, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  movp(current, FieldOperand(current, HeapObject::kMapOffset));
  // Instance types below JS_OBJECT_TYPE (proxies, values) are treated as a
  // hit, same as dictionary elements.
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  CmpInstanceType(current, JS_OBJECT_TYPE);
  j(below, found);
  movp(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmpp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  movp(current, FieldOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  j(not_equal, &loop_again);

  bind(&end);
}
5707
5708
// Emits code that computes the truncated signed division of |dividend| by
// the compile-time constant |divisor| using a magic-number multiplication
// (Granlund/Montgomery style, per base::SignedDivisionByConstant) instead
// of an idiv. The quotient is left in rdx; rax is clobbered; |dividend| is
// preserved and must not be rax or rdx.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(rax));
  DCHECK(!dividend.is(rdx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  // High half of dividend * multiplier lands in rdx.
  movl(rax, Immediate(mag.multiplier));
  imull(dividend);
  // Correction terms when the magic multiplier's sign bit is set.
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) addl(rdx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) subl(rdx, dividend);
  if (mag.shift > 0) sarl(rdx, Immediate(mag.shift));
  // Add the dividend's sign bit to round toward zero.
  movl(rax, dividend);
  shrl(rax, Immediate(31));
  addl(rdx, rax);
}
5724
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005725
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005726} // namespace internal
5727} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01005728
5729#endif // V8_TARGET_ARCH_X64