blob: 566091df4e2d48c65dc4c91b0e67f118f7ac7afc [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_X64
Leon Clarkef7060e22010-06-03 12:02:55 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/base/bits.h"
8#include "src/base/division-by-constant.h"
9#include "src/bootstrapper.h"
10#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011#include "src/debug/debug.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000012#include "src/heap/heap.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013#include "src/register-configuration.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000014#include "src/x64/assembler-x64.h"
15#include "src/x64/macro-assembler-x64.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000016
17namespace v8 {
18namespace internal {
19
// Constructs a MacroAssembler over |buffer|. The root array is assumed to be
// reachable through kRootRegister (root_array_available_ starts true). When
// |create_code_object| is kYes, code_object_ is pre-seeded with a fresh
// handle to the undefined value so it can be used before the real code
// object exists.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false),
      root_array_available_(true) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
31
32
// Sentinel meaning "no usable root-register-relative displacement".
static const int64_t kInvalidRootRegisterDelta = -1;


// Computes the displacement of |other| relative to the value held in
// kRootRegister (the roots array start plus kRootRegisterBias). Returns
// kInvalidRootRegisterDelta when predictable code size is required and the
// reference does not point inside the isolate itself, since such deltas
// could produce differently-sized encodings across runs.
int64_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
  if (predictable_code_size() &&
      (other.address() < reinterpret_cast<Address>(isolate()) ||
       other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
    return kInvalidRootRegisterDelta;
  }
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate()->heap()->roots_array_start());

  int64_t delta = kInvalidRootRegisterDelta;  // Bogus initialization.
  if (kPointerSize == kInt64Size) {
    delta = other.address() - roots_register_value;
  } else {
    // For x32, zero extend the address to 64-bit and calculate the delta.
    uint64_t o = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(other.address()));
    uint64_t r = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(roots_register_value));
    delta = o - r;
  }
  return delta;
}
58
59
// Returns an Operand addressing |target|. Prefers a kRootRegister-relative
// operand (no scratch needed) when the delta fits in an int32 and the
// serializer is off; otherwise materializes the address into |scratch|.
Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(target);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  Move(scratch, target);
  return Operand(scratch, 0);
}
71
72
// Loads the value stored at external reference |source| into |destination|.
// Fast paths: root-register-relative load when possible, or the short
// load_rax encoding when the destination is rax; otherwise goes through
// kScratchRegister.
void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  } else {
    Move(kScratchRegister, source);
    movp(destination, Operand(kScratchRegister, 0));
  }
}
89
90
// Stores |source| to the memory location named by external reference
// |destination|. Mirrors Load(): root-relative store, store_rax short form,
// or a store through kScratchRegister.
void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(destination);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  } else {
    Move(kScratchRegister, destination);
    movp(Operand(kScratchRegister, 0), source);
  }
}
107
108
// Materializes the address of |source| (not its contents) into
// |destination|, using a root-register-relative leap when the displacement
// fits in an int32. LoadAddressSize() below must stay in sync with the
// encodings chosen here.
void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  Move(destination, source);
}
121
122
// Returns the number of bytes LoadAddress() would emit for |source|.
// Used by callers that need to know code size ahead of emission.
int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      // Operand is leap(scratch, Operand(kRootRegister, delta));
      // Opcodes : REX.W 8D ModRM Disp8/Disp32  - 4 or 7.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movp(destination, src);
  return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
}
142
143
// Pushes the address of |source| onto the stack. Uses a 32-bit immediate
// push when the address fits and the serializer is off; otherwise loads the
// full address into kScratchRegister first. With debug code enabled, the
// scratch register is zapped first so stale values are easier to spot.
void MacroAssembler::PushAddress(ExternalReference source) {
  int64_t address = reinterpret_cast<int64_t>(source.address());
  if (is_int32(address) && !serializer_enabled()) {
    if (emit_debug_code()) {
      Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone());
    }
    Push(Immediate(static_cast<int32_t>(address)));
    return;
  }
  LoadAddress(kScratchRegister, source);
  Push(kScratchRegister);
}
156
157
// Loads the root-list entry at |index| into |destination| via kRootRegister.
// The bias is subtracted because kRootRegister holds the roots array start
// plus kRootRegisterBias.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  movp(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}
163
164
// Loads root-list entry (fixed_offset + variable_offset) into |destination|,
// where |variable_offset| is a runtime register index scaled by pointer size.
void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  DCHECK(root_array_available_);
  movp(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}
174
175
// Stores |source| into the root-list slot at |index|. Only roots that are
// allowed to change after heap initialization may be written.
void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  DCHECK(root_array_available_);
  movp(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}
182
183
// Pushes the root-list entry at |index| onto the stack.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  Push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}
188
189
// Compares register |with| against the root-list entry at |index|,
// setting the flags for a following conditional jump.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  cmpp(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}
195
196
// Compares memory operand |with| against the root-list entry at |index|.
// Clobbers kScratchRegister, so the operand must not use it for addressing.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  DCHECK(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpp(with, kScratchRegister);
}
204
205
// Records |addr| (a slot in |object|, which must be in new space) in the
// store buffer, then either returns or falls through per |and_then|. When
// the buffer fills up, the StoreBufferOverflowStub is called to process it.
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register addr,
                                         Register scratch,
                                         SaveFPRegsMode save_fp,
                                         RememberedSetFinalAction and_then) {
  if (emit_debug_code()) {
    // Debug check: the object must actually be in new space.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  movp(scratch, ExternalOperand(store_buffer));
  // Store pointer to buffer.
  movp(Operand(scratch, 0), addr);
  // Increment buffer top.
  addp(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  movp(ExternalOperand(store_buffer), scratch);
  // Call stub on end of buffer.
  Label done;
  // Check for end of buffer.
  testp(scratch, Immediate(StoreBuffer::kStoreBufferMask));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(not_equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
249
250
// Branches to |branch| depending on whether |object| is in new space: the
// page flags are tested against the from-space/to-space mask and the jump
// is taken per condition |cc| (e.g. zero = not in new space).
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance distance) {
  const int mask =
      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
  CheckPageFlag(object, scratch, mask, cc, branch, distance);
}
260
261
// Write barrier for a store of |value| into the field at |offset| within
// |object|. Computes the slot address into |dst| and delegates to
// RecordWrite(). |dst| and |value| are clobbered (zapped under debug code).
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the start
  // of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  leap(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Debug check: the computed slot address must be pointer-aligned.
    Label ok;
    testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(dst, kZapValue, Assembler::RelocInfoNone());
  }
}
305
306
// Write barrier for a store of |value| into element |index| (an untagged
// integer index) of FixedArray |object|. |index| is reused as the slot
// address register and is clobbered (zapped under debug code).
void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Array access: calculate the destination address. Index is not a smi.
  Register dst = index;
  leap(dst, Operand(object, index, times_pointer_size,
                    FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(index, kZapValue, Assembler::RelocInfoNone());
  }
}
341
342
// Write barrier specialized for storing |map| into |object|'s map slot.
// Only needed for incremental marking; |dst| receives the slot address and
// both |dst| and |map| are clobbered (zapped under debug code).
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       SaveFPRegsMode fp_mode) {
  DCHECK(!object.is(kScratchRegister));
  DCHECK(!object.is(map));
  DCHECK(!object.is(dst));
  DCHECK(!map.is(dst));
  AssertNotSmi(object);

  if (emit_debug_code()) {
    // Debug check: |map| must itself be a map (its map is the meta map).
    Label ok;
    if (map.is(kScratchRegister)) pushq(map);
    CompareMap(map, isolate()->factory()->meta_map());
    if (map.is(kScratchRegister)) popq(map);
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Debug check: the map slot of |object| already holds |map|.
    Label ok;
    if (map.is(kScratchRegister)) pushq(map);
    cmpp(map, FieldOperand(object, HeapObject::kMapOffset));
    if (map.is(kScratchRegister)) popq(map);
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // Compute the address.
  leap(dst, FieldOperand(object, HeapObject::kMapOffset));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(dst, kZapValue, Assembler::RelocInfoNone());
    Move(map, kZapValue, Assembler::RelocInfoNone());
  }
}
412
413
// General write barrier: records the store of |value| into the slot whose
// address is in |address| (a slot of |object|). Skips all work when both the
// remembered set update is omitted and incremental marking is off, and
// skips the stub call when the page flags show no barrier is required.
// |address| and |value| are clobbered (zapped under debug code).
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Debug check: the slot must currently hold |value|.
    Label ok;
    cmpp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(address, kZapValue, Assembler::RelocInfoNone());
    Move(value, kZapValue, Assembler::RelocInfoNone());
  }
}
482
// Write barrier for storing |code_entry| into the code-entry field of
// |js_function|. Calls a C function directly (rather than a stub), so the
// inputs are pinned to rdi/rcx/rax and all caller-saved registers are
// preserved around the call.
void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // The input registers are fixed to make calling the C write barrier function
  // easier.
  DCHECK(js_function.is(rdi));
  DCHECK(code_entry.is(rcx));
  DCHECK(scratch.is(rax));

  // Since a code entry (value) is always in old space, we don't need to update
  // remembered set. If incremental marking is off, there is nothing for us to
  // do.
  if (!FLAG_incremental_marking) return;

  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    // Debug check: the field already holds |code_entry|.
    Label ok;
    leap(scratch, FieldOperand(js_function, offset));
    cmpp(code_entry, Operand(scratch, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
                Label::kNear);

  // Save input registers.
  Push(js_function);
  Push(code_entry);

  const Register dst = scratch;
  leap(dst, FieldOperand(js_function, offset));

  // Save caller-saved registers.
  PushCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  int argument_count = 3;
  PrepareCallCFunction(argument_count);

  // Load the argument registers.
  if (arg_reg_1.is(rcx)) {
    // Windows calling convention.
    DCHECK(arg_reg_2.is(rdx) && arg_reg_3.is(r8));

    movp(arg_reg_1, js_function);  // rcx gets rdi.
    movp(arg_reg_2, dst);          // rdx gets rax.
  } else {
    // AMD64 calling convention.
    DCHECK(arg_reg_1.is(rdi) && arg_reg_2.is(rsi) && arg_reg_3.is(rdx));

    // rdi is already loaded with js_function.
    movp(arg_reg_2, dst);  // rsi gets rax.
  }
  Move(arg_reg_3, ExternalReference::isolate_address(isolate()));

  {
    // The record-write C function does not trigger GC, so external calls
    // are allowed here.
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  PopCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  // Restore input registers.
  Pop(code_entry);
  Pop(js_function);

  bind(&done);
}
Steve Block8defd9f2010-07-08 12:39:36 +0100567
// Debug-only assertion: emits a Check() (abort if |cc| does not hold) only
// when debug code emission is enabled.
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}
571
572
// Debug-only check that |elements| has one of the fast-elements backing
// store maps (FixedArray, FixedDoubleArray, or copy-on-write FixedArray);
// aborts otherwise.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
589
590
// Emits code that aborts with |reason| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(reason);
  // Control will not return here.
  bind(&L);
}
598
599
// Emits a runtime check that rsp satisfies the OS activation frame
// alignment; traps with int3 if not. No code is emitted when the required
// alignment is no stricter than the natural pointer alignment.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    testp(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
613
614
// Jumps to |then_label| if |result| is zero while |op| is negative, i.e.
// when an integer operation would have produced -0.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok, Label::kNear);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}
625
626
// Emits code that aborts execution with |reason|: pushes the reason as a
// Smi and calls Runtime::kAbort (fabricating a frame scope if none exists).
// In DEBUG builds with --trap-on-abort, emits a bare int3 instead.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  Move(kScratchRegister, Smi::FromInt(static_cast<int>(reason)),
       Assembler::RelocInfoNone());
  Push(kScratchRegister);

  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // Control will not return here.
  int3();
}
656
657
// Calls the given code stub. Disallowed in contexts where stub calls are
// unsafe (see AllowThisStubCall).
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
662
663
// Tail-calls (jumps to) the given code stub, leaving the current frame as
// the stub's frame.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
667
668
// Returns from a stub, popping |argc| - 1 arguments (the receiver stays in
// the return-address slot accounting). Only valid while generating a stub.
void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
673
674
// Returns true if calling |stub| is safe here: either a frame exists, or
// the stub never sets up a frame of its own.
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}
678
679
// Extracts the cached array index from a string hash-field value in |hash|
// and leaves it in |index| as a Smi.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!hash.is(index)) {
    movl(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}
691
692
// Calls runtime function |f| with |num_arguments| arguments already on the
// stack, routing through CEntryStub. rax carries the argument count and rbx
// the function's entry address, per the C-entry calling convention.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(isolate(), f->result_size, save_doubles);
  CallStub(&ces);
}
710
711
// Calls the external (C++) function at |ext| with |num_arguments| stack
// arguments via CEntryStub, using the same rax/rbx convention as
// CallRuntime.
void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
720
721
// Tail-calls runtime function |fid|. For fixed-arity functions, loads the
// argument count into rax; variable-arity functions must have set rax
// already.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- rax                    : number of  arguments
  // -----------------------------------
  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    Set(rax, function->nargs);
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
740
741
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000742void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000743 // Set the entry point and jump to the C entry runtime stub.
Steve Block44f0eee2011-05-26 01:26:41 +0100744 LoadAddress(rbx, ext);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000745 CEntryStub ces(isolate(), 1);
Steve Block3ce2e202009-11-05 08:53:23 +0000746 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
Steve Blocka7e24c12009-10-30 11:49:00 +0000747}
748
749
#define REG(Name) \
  { Register::kCode_##Name }

// The general-purpose registers that PushCallerSaved/PopCallerSaved
// preserve.  r12-r15 (and rsp) are callee-saved on all supported
// platforms and are therefore omitted.
static const Register saved_regs[] = {
  REG(rax), REG(rcx), REG(rdx), REG(rbx), REG(rbp), REG(rsi), REG(rdi), REG(r8),
  REG(r9), REG(r10), REG(r11)
};

#undef REG

static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
761
762
// Pushes all registers in saved_regs (minus up to three exclusions) and,
// when |fp_mode| == kSaveFPRegs, all XMM registers.  Must be matched by a
// PopCallerSaved call with the same arguments.
void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1,
                                     Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pushq(reg);
    }
  }
  // R12 to r15 are callee save on all platforms.
  if (fp_mode == kSaveFPRegs) {
    // Reserve one double slot per XMM register, then spill them all.
    subp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      Movsd(Operand(rsp, i * kDoubleSize), reg);
    }
  }
}
785
786
// Reverses PushCallerSaved: restores the XMM registers first (when they
// were saved), then pops the general-purpose registers in the opposite
// order they were pushed.  Arguments must match the PushCallerSaved call.
void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
                                    Register exclusion1,
                                    Register exclusion2,
                                    Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      Movsd(reg, Operand(rsp, i * kDoubleSize));
    }
    addp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      popq(reg);
    }
  }
}
805
806
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000807void MacroAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
808 if (CpuFeatures::IsSupported(AVX)) {
809 CpuFeatureScope scope(this, AVX);
810 vcvtss2sd(dst, src, src);
811 } else {
812 cvtss2sd(dst, src);
813 }
814}
815
816
817void MacroAssembler::Cvtss2sd(XMMRegister dst, const Operand& src) {
818 if (CpuFeatures::IsSupported(AVX)) {
819 CpuFeatureScope scope(this, AVX);
820 vcvtss2sd(dst, dst, src);
821 } else {
822 cvtss2sd(dst, src);
823 }
824}
825
826
827void MacroAssembler::Cvtsd2ss(XMMRegister dst, XMMRegister src) {
828 if (CpuFeatures::IsSupported(AVX)) {
829 CpuFeatureScope scope(this, AVX);
830 vcvtsd2ss(dst, src, src);
831 } else {
832 cvtsd2ss(dst, src);
833 }
834}
835
836
837void MacroAssembler::Cvtsd2ss(XMMRegister dst, const Operand& src) {
838 if (CpuFeatures::IsSupported(AVX)) {
839 CpuFeatureScope scope(this, AVX);
840 vcvtsd2ss(dst, dst, src);
841 } else {
842 cvtsd2ss(dst, src);
843 }
844}
845
846
// Signed 32-bit integer -> double.  |dst| is zeroed first to break the
// false dependence on its previous contents (the scalar convert only
// writes the low lanes of the destination).
void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtlsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtlsi2sd(dst, src);
  }
}
857
858
// Signed 32-bit integer (memory) -> double.  |dst| is zeroed first to
// break the false dependence on its previous contents.
void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtlsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtlsi2sd(dst, src);
  }
}
869
870
// Signed 32-bit integer -> float.  |dst| is zeroed first to break the
// false dependence on its previous contents.
void MacroAssembler::Cvtlsi2ss(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtlsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtlsi2ss(dst, src);
  }
}
881
882
// Signed 32-bit integer (memory) -> float.  |dst| is zeroed first to
// break the false dependence on its previous contents.
void MacroAssembler::Cvtlsi2ss(XMMRegister dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtlsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtlsi2ss(dst, src);
  }
}
893
894
// Signed 64-bit integer -> float.  |dst| is zeroed first to break the
// false dependence on its previous contents.
void MacroAssembler::Cvtqsi2ss(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtqsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtqsi2ss(dst, src);
  }
}
905
906
// Signed 64-bit integer (memory) -> float.  |dst| is zeroed first to
// break the false dependence on its previous contents.
void MacroAssembler::Cvtqsi2ss(XMMRegister dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtqsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtqsi2ss(dst, src);
  }
}
917
918
// Signed 64-bit integer -> double.  |dst| is zeroed first to break the
// false dependence on its previous contents.
void MacroAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtqsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtqsi2sd(dst, src);
  }
}
929
930
// Signed 64-bit integer (memory) -> double.  |dst| is zeroed first to
// break the false dependence on its previous contents.
void MacroAssembler::Cvtqsi2sd(XMMRegister dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtqsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtqsi2sd(dst, src);
  }
}
941
942
// Unsigned 64-bit integer -> float.  There is no unsigned convert
// instruction, so when the MSB of |src| is set the value is halved
// (with the discarded low bit or-ed back in so the final rounding is
// unaffected), converted, and doubled again.  Clobbers |src| and |tmp|
// on that path.
void MacroAssembler::Cvtqui2ss(XMMRegister dst, Register src, Register tmp) {
  Label msb_set_src;
  Label jmp_return;
  testq(src, src);
  j(sign, &msb_set_src, Label::kNear);
  // MSB clear: the value fits in a signed 64-bit convert.
  Cvtqsi2ss(dst, src);
  jmp(&jmp_return, Label::kNear);
  bind(&msb_set_src);
  movq(tmp, src);
  shrq(src, Immediate(1));
  // Recover the least significant bit to avoid rounding errors.
  andq(tmp, Immediate(1));
  orq(src, tmp);
  Cvtqsi2ss(dst, src);
  // Undo the halving above.
  addss(dst, dst);
  bind(&jmp_return);
}
960
961
// Unsigned 64-bit integer -> double.  Same technique as Cvtqui2ss: when
// the MSB of |src| is set, halve (keeping the low bit or-ed in), convert,
// then double.  Clobbers |src| and |tmp| on that path.
void MacroAssembler::Cvtqui2sd(XMMRegister dst, Register src, Register tmp) {
  Label msb_set_src;
  Label jmp_return;
  testq(src, src);
  j(sign, &msb_set_src, Label::kNear);
  // MSB clear: the value fits in a signed 64-bit convert.
  Cvtqsi2sd(dst, src);
  jmp(&jmp_return, Label::kNear);
  bind(&msb_set_src);
  movq(tmp, src);
  shrq(src, Immediate(1));
  // Recover the least significant bit to avoid rounding errors.
  andq(tmp, Immediate(1));
  orq(src, tmp);
  Cvtqsi2sd(dst, src);
  // Undo the halving above.
  addsd(dst, dst);
  bind(&jmp_return);
}
978
979
980void MacroAssembler::Cvtsd2si(Register dst, XMMRegister src) {
981 if (CpuFeatures::IsSupported(AVX)) {
982 CpuFeatureScope scope(this, AVX);
983 vcvtsd2si(dst, src);
984 } else {
985 cvtsd2si(dst, src);
986 }
987}
988
989
Ben Murdoch097c5b22016-05-18 11:27:45 +0100990void MacroAssembler::Cvttss2si(Register dst, XMMRegister src) {
991 if (CpuFeatures::IsSupported(AVX)) {
992 CpuFeatureScope scope(this, AVX);
993 vcvttss2si(dst, src);
994 } else {
995 cvttss2si(dst, src);
996 }
997}
998
999
1000void MacroAssembler::Cvttss2si(Register dst, const Operand& src) {
1001 if (CpuFeatures::IsSupported(AVX)) {
1002 CpuFeatureScope scope(this, AVX);
1003 vcvttss2si(dst, src);
1004 } else {
1005 cvttss2si(dst, src);
1006 }
1007}
1008
1009
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001010void MacroAssembler::Cvttsd2si(Register dst, XMMRegister src) {
1011 if (CpuFeatures::IsSupported(AVX)) {
1012 CpuFeatureScope scope(this, AVX);
1013 vcvttsd2si(dst, src);
1014 } else {
1015 cvttsd2si(dst, src);
1016 }
1017}
1018
1019
1020void MacroAssembler::Cvttsd2si(Register dst, const Operand& src) {
1021 if (CpuFeatures::IsSupported(AVX)) {
1022 CpuFeatureScope scope(this, AVX);
1023 vcvttsd2si(dst, src);
1024 } else {
1025 cvttsd2si(dst, src);
1026 }
1027}
1028
1029
1030void MacroAssembler::Cvttss2siq(Register dst, XMMRegister src) {
1031 if (CpuFeatures::IsSupported(AVX)) {
1032 CpuFeatureScope scope(this, AVX);
1033 vcvttss2siq(dst, src);
1034 } else {
1035 cvttss2siq(dst, src);
1036 }
1037}
1038
1039
1040void MacroAssembler::Cvttss2siq(Register dst, const Operand& src) {
1041 if (CpuFeatures::IsSupported(AVX)) {
1042 CpuFeatureScope scope(this, AVX);
1043 vcvttss2siq(dst, src);
1044 } else {
1045 cvttss2siq(dst, src);
1046 }
1047}
1048
1049
1050void MacroAssembler::Cvttsd2siq(Register dst, XMMRegister src) {
1051 if (CpuFeatures::IsSupported(AVX)) {
1052 CpuFeatureScope scope(this, AVX);
1053 vcvttsd2siq(dst, src);
1054 } else {
1055 cvttsd2siq(dst, src);
1056 }
1057}
1058
1059
1060void MacroAssembler::Cvttsd2siq(Register dst, const Operand& src) {
1061 if (CpuFeatures::IsSupported(AVX)) {
1062 CpuFeatureScope scope(this, AVX);
1063 vcvttsd2siq(dst, src);
1064 } else {
1065 cvttsd2siq(dst, src);
1066 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001067}
1068
1069
// Loads a value of representation |r| from |src| into |dst|, choosing the
// move that sign- or zero-extends appropriately.  Doubles must use the
// XMM load paths instead.
void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsxbq(dst, src);
  } else if (r.IsUInteger8()) {
    movzxbl(dst, src);
  } else if (r.IsInteger16()) {
    movsxwq(dst, src);
  } else if (r.IsUInteger16()) {
    movzxwl(dst, src);
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    // Tagged or pointer-sized value.
    movp(dst, src);
  }
}
1086
1087
// Stores |src| to |dst| with the width implied by representation |r|.
// For tagged stores, debug builds assert that the value matches the
// declared heap-object/smi representation.  Doubles must use the XMM
// store paths instead.
void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    movb(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    movw(dst, src);
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    movp(dst, src);
  }
}
1105
1106
// Loads the 64-bit immediate |x| into |dst| using the shortest encoding:
// xor for zero, a 32-bit move (zero- or sign-extended) when the value
// fits, and a full 64-bit move otherwise.
void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    // Writing the low 32 bits zero-extends to the full register.
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    // Sign-extending 32-bit immediate move.
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x);
  }
}
1118
// Loads the 64-bit immediate |x| with relocation info attached.  Only
// WASM_MEMORY_REFERENCE is supported; any other mode must be NONE.
// NOTE(review): when |rmode| is NONE this emits no code at all -- confirm
// that callers never rely on |dst| being written in that case.
void MacroAssembler::Set(Register dst, int64_t x, RelocInfo::Mode rmode) {
  if (rmode == RelocInfo::WASM_MEMORY_REFERENCE) {
    DCHECK(x != 0);
    movq(dst, x, rmode);
  } else {
    DCHECK(RelocInfo::IsNone(rmode));
  }
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001127
// Stores the pointer-sized immediate |x| to memory at |dst|.  On 64-bit
// pointers a value that does not fit a sign-extended 32-bit immediate is
// staged through kScratchRegister.
void MacroAssembler::Set(const Operand& dst, intptr_t x) {
  if (kPointerSize == kInt64Size) {
    if (is_int32(x)) {
      movp(dst, Immediate(static_cast<int32_t>(x)));
    } else {
      Set(kScratchRegister, x);
      movp(dst, kScratchRegister);
    }
  } else {
    // 32-bit pointers: the immediate always fits.
    movp(dst, Immediate(static_cast<int32_t>(x)));
  }
}
1140
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001141
Steve Blocka7e24c12009-10-30 11:49:00 +00001142// ----------------------------------------------------------------------------
1143// Smi tagging, untagging and tag detection.
1144
// Returns true if |x| has too many significant bits to be embedded in
// code safely; such values are obfuscated with the jit cookie by
// SafeMove/SafePush to thwart JIT spraying.
bool MacroAssembler::IsUnsafeInt(const int32_t x) {
  static const int kMaxBits = 17;
  return !is_intn(x, kMaxBits);
}
1149
1150
// Moves the smi |src| into |dst| without embedding its raw bits in the
// instruction stream: unsafe values are xored with the jit cookie at
// emit time and un-xored at run time.  May clobber kScratchRegister.
void MacroAssembler::SafeMove(Register dst, Smi* src) {
  DCHECK(!dst.is(kScratchRegister));
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    if (SmiValuesAre32Bits()) {
      // JIT cookie can be converted to Smi.
      Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
      xorp(dst, kScratchRegister);
    } else {
      DCHECK(SmiValuesAre31Bits());
      int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
      movp(dst, Immediate(value ^ jit_cookie()));
      xorp(dst, Immediate(jit_cookie()));
    }
  } else {
    Move(dst, src);
  }
}
1169
1170
// Pushes the smi |src| without embedding its raw bits in the instruction
// stream (see SafeMove).  May clobber kScratchRegister.
void MacroAssembler::SafePush(Smi* src) {
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    if (SmiValuesAre32Bits()) {
      // JIT cookie can be converted to Smi.
      Push(Smi::FromInt(src->value() ^ jit_cookie()));
      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
      // Un-xor the pushed value in place on the stack.
      xorp(Operand(rsp, 0), kScratchRegister);
    } else {
      DCHECK(SmiValuesAre31Bits());
      int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
      Push(Immediate(value ^ jit_cookie()));
      xorp(Operand(rsp, 0), Immediate(jit_cookie()));
    }
  } else {
    Push(src);
  }
}
1188
1189
Steve Block8defd9f2010-07-08 12:39:36 +01001190Register MacroAssembler::GetSmiConstant(Smi* source) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001191 STATIC_ASSERT(kSmiTag == 0);
Steve Block8defd9f2010-07-08 12:39:36 +01001192 int value = source->value();
1193 if (value == 0) {
1194 xorl(kScratchRegister, kScratchRegister);
1195 return kScratchRegister;
1196 }
Steve Block8defd9f2010-07-08 12:39:36 +01001197 LoadSmiConstant(kScratchRegister, source);
1198 return kScratchRegister;
1199}
1200
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001201
// Loads the smi constant |source| into |dst|.  Zero uses the short xor
// encoding; other values are emitted as an unrelocated immediate move.
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  STATIC_ASSERT(kSmiTag == 0);
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
  } else {
    Move(dst, source, Assembler::RelocInfoNone());
  }
}
1211
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001212
// Tags the 32-bit integer in |src| as a smi in |dst| by shifting it into
// the smi payload position.
void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shlp(dst, Immediate(kSmiShift));
}
1220
1221
// Stores the 32-bit integer |src| as a smi into the memory field |dst|.
// With 32-bit smi values only the payload half of the field needs to be
// written; otherwise the value is tagged via kScratchRegister first.
void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    // Verify the destination currently holds a smi (tag bit clear).
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok, Label::kNear);
    Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
    bind(&ok);
  }

  if (SmiValuesAre32Bits()) {
    DCHECK(kSmiShift % kBitsPerByte == 0);
    movl(Operand(dst, kSmiShift / kBitsPerByte), src);
  } else {
    DCHECK(SmiValuesAre31Bits());
    Integer32ToSmi(kScratchRegister, src);
    movp(dst, kScratchRegister);
  }
}
1240
1241
// Computes the smi tag of (src + constant), using lea when the result
// goes to a different register so |src| is left untouched.
void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));
  }
  shlp(dst, Immediate(kSmiShift));
}
1252
1253
// Untags the smi in |src| into a 32-bit integer in |dst|.  With 32-bit
// smis a logical shift suffices; with 31-bit smis an arithmetic shift
// preserves the sign.
void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movp(dst, src);
  }

  if (SmiValuesAre32Bits()) {
    shrp(dst, Immediate(kSmiShift));
  } else {
    DCHECK(SmiValuesAre31Bits());
    sarl(dst, Immediate(kSmiShift));
  }
}
1267
1268
// Untags a smi loaded from memory into a 32-bit integer.  With 32-bit
// smis only the payload half of the field needs to be read.
void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    movl(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movl(dst, src);
    sarl(dst, Immediate(kSmiShift));
  }
}
1278
1279
// Untags the smi in |src| into a sign-extended 64-bit integer in |dst|.
void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movp(dst, src);
  }
  sarp(dst, Immediate(kSmiShift));
  if (kPointerSize == kInt32Size) {
    // Sign extend to 64-bit.
    movsxlq(dst, dst);
  }
}
1291
1292
// Untags a smi loaded from memory into a sign-extended 64-bit integer.
void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    // Sign-extend the payload half of the field directly.
    movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movp(dst, src);
    SmiToInteger64(dst, dst);
  }
}
1302
1303
// Sets the flags from testing the smi |src| against itself (e.g. for a
// zero/sign check).  Debug builds assert |src| really is a smi.
void MacroAssembler::SmiTest(Register src) {
  AssertSmi(src);
  testp(src, src);
}
1308
1309
// Compares two smi registers, setting the flags for a signed compare of
// their (tagged) values.
void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  AssertSmi(smi1);
  AssertSmi(smi2);
  cmpp(smi1, smi2);
}
1315
1316
// Compares the smi in |dst| against the smi constant |src|.
void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  AssertSmi(dst);
  Cmp(dst, src);
}
1321
1322
// Compares |dst| against the smi constant |src|.  Zero uses a test
// instruction; other constants are materialized in kScratchRegister.
void MacroAssembler::Cmp(Register dst, Smi* src) {
  DCHECK(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testp(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpp(dst, constant_reg);
  }
}
1332
1333
// Compares the smi in |dst| against the smi in memory at |src|.
void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmpp(dst, src);
}
1339
1340
// Compares the smi in memory at |dst| against the smi register |src|.
void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmpp(dst, src);
}
1346
1347
// Compares the smi in memory at |dst| against the smi constant |src|.
// With 32-bit smis only the payload half of the field is compared.
void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  AssertSmi(dst);
  if (SmiValuesAre32Bits()) {
    cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
  } else {
    DCHECK(SmiValuesAre31Bits());
    cmpl(dst, Immediate(src));
  }
}
1357
1358
// Compares the memory operand |dst| against the smi constant |src|,
// which is materialized in kScratchRegister first.
void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  DCHECK(!dst.AddressUsesRegister(smi_reg));
  cmpp(dst, smi_reg);
}
1365
1366
// Compares the smi in memory at |dst| against the untagged 32-bit
// integer in |src|.  May clobber kScratchRegister with 31-bit smis.
void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  if (SmiValuesAre32Bits()) {
    // The payload half of the field is the untagged value.
    cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
  } else {
    DCHECK(SmiValuesAre31Bits());
    SmiToInteger32(kScratchRegister, dst);
    cmpl(kScratchRegister, src);
  }
}
1376
1377
// Computes untag(src) * 2^power for a non-negative smi |src| as a 64-bit
// integer.  The untag shift and the multiply shift are folded into a
// single shift in either direction.
void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  DCHECK(power >= 0);
  DCHECK(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movp(dst, src);
  }
  if (power < kSmiShift) {
    sarp(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shlp(dst, Immediate(power - kSmiShift));
  }
  // power == kSmiShift: the tagged value already equals the result.
}
1396
1397
// Computes untag(src) / 2^power for a non-negative smi |src| as a 32-bit
// integer, folding the untag and divide into one logical shift.  Only the
// in-place (dst == src) form is implemented.
void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  DCHECK((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shrp(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}
1408
1409
// Computes dst = src1 | src2 when both are smis; otherwise jumps to
// |on_not_smis| without writing |dst|.  When |dst| aliases an input the
// result is staged in kScratchRegister so the inputs survive the bailout.
void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label* on_not_smis,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
    DCHECK(!src1.is(kScratchRegister));
    DCHECK(!src2.is(kScratchRegister));
    movp(kScratchRegister, src1);
    orp(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
    movp(dst, kScratchRegister);
  } else {
    movp(dst, src1);
    orp(dst, src2);
    JumpIfNotSmi(dst, on_not_smis, near_jump);
  }
}
1426
1427
// Tests the smi tag of |src| and returns the condition that holds when
// the value is a smi.
Condition MacroAssembler::CheckSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}
1433
1434
// Tests the smi tag of the value at |src| and returns the condition that
// holds when it is a smi.
Condition MacroAssembler::CheckSmi(const Operand& src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}
1440
1441
// Returns the condition that holds when |src| is a smi with a
// non-negative value.  Clobbers kScratchRegister.
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movp(kScratchRegister, src);
  // Rotate the sign bit down next to the tag bit so one testb covers both.
  rolp(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}
1450
1451
// Returns the condition that holds when both |first| and |second| are
// smis.  May clobber kScratchRegister.
Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  if (SmiValuesAre32Bits()) {
    // The sum of two 32-bit smis has both low tag bits clear iff both
    // inputs were smis (each contributes at most one tag bit).
    leal(kScratchRegister, Operand(first, second, times_1, 0));
    testb(kScratchRegister, Immediate(0x03));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movl(kScratchRegister, first);
    orl(kScratchRegister, second);
    testb(kScratchRegister, Immediate(kSmiTagMask));
  }
  return zero;
}
1468
1469
// Returns the condition that holds when both inputs are non-negative
// smis: or-ing them combines their sign and tag bits, which are then
// checked as in CheckNonNegativeSmi.  Clobbers kScratchRegister.
Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movp(kScratchRegister, first);
  orp(kScratchRegister, second);
  rolp(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}
1481
1482
// Returns the condition that holds when at least one of |first| and
// |second| is a smi (the and of their tag bits is zero).  |scratch| is
// clobbered and may alias either input.
Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}
1500
1501
// Returns the condition that holds when the 32-bit integer in |src| fits
// in a smi.  With 31-bit smis this emits a range compare.
Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  if (SmiValuesAre32Bits()) {
    // A 32-bit integer value can always be converted to a smi.
    return always;
  } else {
    DCHECK(SmiValuesAre31Bits());
    // Values in [-2^30, 2^30) leave the sign flag equal to the overflow
    // of this biased compare.
    cmpl(src, Immediate(0xc0000000));
    return positive;
  }
}
1512
1513
// Returns the condition that holds when the unsigned 32-bit integer in
// |src| fits in a smi.
Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  if (SmiValuesAre32Bits()) {
    // An unsigned 32-bit integer value is valid as long as the high bit
    // is not set.
    testl(src, src);
    return positive;
  } else {
    DCHECK(SmiValuesAre31Bits());
    // Both top bits must be clear for the value to fit in 30 bits.
    testl(src, Immediate(0xc0000000));
    return zero;
  }
}
1526
1527
// Extracts the smi tag bit of |src| into |dst| (0 for smi, kSmiTagMask
// for non-smi).
void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}
1536
1537
// Extracts the smi tag bit of the value at |src| into |dst|.  When |dst|
// is part of the address, the value is loaded first so the address stays
// valid.
void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}
1547
1548
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001549void MacroAssembler::JumpIfValidSmiValue(Register src,
1550 Label* on_valid,
1551 Label::Distance near_jump) {
1552 Condition is_valid = CheckInteger32ValidSmiValue(src);
1553 j(is_valid, on_valid, near_jump);
1554}
1555
1556
Ben Murdoch257744e2011-11-30 15:57:28 +00001557void MacroAssembler::JumpIfNotValidSmiValue(Register src,
1558 Label* on_invalid,
1559 Label::Distance near_jump) {
1560 Condition is_valid = CheckInteger32ValidSmiValue(src);
1561 j(NegateCondition(is_valid), on_invalid, near_jump);
1562}
1563
1564
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001565void MacroAssembler::JumpIfUIntValidSmiValue(Register src,
1566 Label* on_valid,
1567 Label::Distance near_jump) {
1568 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1569 j(is_valid, on_valid, near_jump);
1570}
1571
1572
Ben Murdoch257744e2011-11-30 15:57:28 +00001573void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1574 Label* on_invalid,
1575 Label::Distance near_jump) {
1576 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1577 j(NegateCondition(is_valid), on_invalid, near_jump);
1578}
1579
1580
1581void MacroAssembler::JumpIfSmi(Register src,
1582 Label* on_smi,
1583 Label::Distance near_jump) {
1584 Condition smi = CheckSmi(src);
1585 j(smi, on_smi, near_jump);
1586}
1587
1588
1589void MacroAssembler::JumpIfNotSmi(Register src,
1590 Label* on_not_smi,
1591 Label::Distance near_jump) {
1592 Condition smi = CheckSmi(src);
1593 j(NegateCondition(smi), on_not_smi, near_jump);
1594}
1595
1596
1597void MacroAssembler::JumpUnlessNonNegativeSmi(
1598 Register src, Label* on_not_smi_or_negative,
1599 Label::Distance near_jump) {
1600 Condition non_negative_smi = CheckNonNegativeSmi(src);
1601 j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
1602}
1603
1604
1605void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1606 Smi* constant,
1607 Label* on_equals,
1608 Label::Distance near_jump) {
1609 SmiCompare(src, constant);
1610 j(equal, on_equals, near_jump);
1611}
1612
1613
1614void MacroAssembler::JumpIfNotBothSmi(Register src1,
1615 Register src2,
1616 Label* on_not_both_smi,
1617 Label::Distance near_jump) {
1618 Condition both_smi = CheckBothSmi(src1, src2);
1619 j(NegateCondition(both_smi), on_not_both_smi, near_jump);
1620}
1621
1622
1623void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
1624 Register src2,
1625 Label* on_not_both_smi,
1626 Label::Distance near_jump) {
1627 Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
1628 j(NegateCondition(both_smi), on_not_both_smi, near_jump);
1629}
1630
1631
Steve Block3ce2e202009-11-05 08:53:23 +00001632void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
1633 if (constant->value() == 0) {
1634 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001635 movp(dst, src);
Steve Block3ce2e202009-11-05 08:53:23 +00001636 }
Steve Block8defd9f2010-07-08 12:39:36 +01001637 return;
Steve Block3ce2e202009-11-05 08:53:23 +00001638 } else if (dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001639 DCHECK(!dst.is(kScratchRegister));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001640 Register constant_reg = GetSmiConstant(constant);
1641 addp(dst, constant_reg);
Steve Blocka7e24c12009-10-30 11:49:00 +00001642 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001643 LoadSmiConstant(dst, constant);
1644 addp(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001645 }
1646}
1647
1648
Leon Clarkef7060e22010-06-03 12:02:55 +01001649void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
1650 if (constant->value() != 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001651 if (SmiValuesAre32Bits()) {
1652 addl(Operand(dst, kSmiShift / kBitsPerByte),
1653 Immediate(constant->value()));
1654 } else {
1655 DCHECK(SmiValuesAre31Bits());
1656 addp(dst, Immediate(constant));
1657 }
Leon Clarkef7060e22010-06-03 12:02:55 +01001658 }
1659}
1660
1661
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001662void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant,
1663 SmiOperationConstraints constraints,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001664 Label* bailout_label,
Ben Murdoch257744e2011-11-30 15:57:28 +00001665 Label::Distance near_jump) {
1666 if (constant->value() == 0) {
1667 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001668 movp(dst, src);
Ben Murdoch257744e2011-11-30 15:57:28 +00001669 }
1670 } else if (dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001671 DCHECK(!dst.is(kScratchRegister));
Ben Murdoch257744e2011-11-30 15:57:28 +00001672 LoadSmiConstant(kScratchRegister, constant);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001673 addp(dst, kScratchRegister);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001674 if (constraints & SmiOperationConstraint::kBailoutOnNoOverflow) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001675 j(no_overflow, bailout_label, near_jump);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001676 DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001677 subp(dst, kScratchRegister);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001678 } else if (constraints & SmiOperationConstraint::kBailoutOnOverflow) {
1679 if (constraints & SmiOperationConstraint::kPreserveSourceRegister) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001680 Label done;
1681 j(no_overflow, &done, Label::kNear);
1682 subp(dst, kScratchRegister);
1683 jmp(bailout_label, near_jump);
1684 bind(&done);
1685 } else {
1686 // Bailout if overflow without reserving src.
1687 j(overflow, bailout_label, near_jump);
1688 }
1689 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001690 UNREACHABLE();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001691 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001692 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001693 DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
1694 DCHECK(constraints & SmiOperationConstraint::kBailoutOnOverflow);
Ben Murdoch257744e2011-11-30 15:57:28 +00001695 LoadSmiConstant(dst, constant);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001696 addp(dst, src);
1697 j(overflow, bailout_label, near_jump);
Ben Murdoch257744e2011-11-30 15:57:28 +00001698 }
1699}
1700
1701
Steve Block3ce2e202009-11-05 08:53:23 +00001702void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
1703 if (constant->value() == 0) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001704 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001705 movp(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001706 }
Steve Block3ce2e202009-11-05 08:53:23 +00001707 } else if (dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001708 DCHECK(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01001709 Register constant_reg = GetSmiConstant(constant);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001710 subp(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00001711 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00001712 if (constant->value() == Smi::kMinValue) {
Steve Block8defd9f2010-07-08 12:39:36 +01001713 LoadSmiConstant(dst, constant);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001714 // Adding and subtracting the min-value gives the same result, it only
1715 // differs on the overflow bit, which we don't check here.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001716 addp(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001717 } else {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001718 // Subtract by adding the negation.
Steve Block8defd9f2010-07-08 12:39:36 +01001719 LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001720 addp(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001721 }
1722 }
1723}
1724
1725
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001726void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant,
1727 SmiOperationConstraints constraints,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001728 Label* bailout_label,
Ben Murdoch257744e2011-11-30 15:57:28 +00001729 Label::Distance near_jump) {
1730 if (constant->value() == 0) {
1731 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001732 movp(dst, src);
Ben Murdoch257744e2011-11-30 15:57:28 +00001733 }
1734 } else if (dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001735 DCHECK(!dst.is(kScratchRegister));
1736 LoadSmiConstant(kScratchRegister, constant);
1737 subp(dst, kScratchRegister);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001738 if (constraints & SmiOperationConstraint::kBailoutOnNoOverflow) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001739 j(no_overflow, bailout_label, near_jump);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001740 DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001741 addp(dst, kScratchRegister);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001742 } else if (constraints & SmiOperationConstraint::kBailoutOnOverflow) {
1743 if (constraints & SmiOperationConstraint::kPreserveSourceRegister) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001744 Label done;
1745 j(no_overflow, &done, Label::kNear);
1746 addp(dst, kScratchRegister);
1747 jmp(bailout_label, near_jump);
1748 bind(&done);
1749 } else {
1750 // Bailout if overflow without reserving src.
1751 j(overflow, bailout_label, near_jump);
1752 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001753 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001754 UNREACHABLE();
Ben Murdoch257744e2011-11-30 15:57:28 +00001755 }
1756 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001757 DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
1758 DCHECK(constraints & SmiOperationConstraint::kBailoutOnOverflow);
Ben Murdoch257744e2011-11-30 15:57:28 +00001759 if (constant->value() == Smi::kMinValue) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001760 DCHECK(!dst.is(kScratchRegister));
1761 movp(dst, src);
1762 LoadSmiConstant(kScratchRegister, constant);
1763 subp(dst, kScratchRegister);
1764 j(overflow, bailout_label, near_jump);
Ben Murdoch257744e2011-11-30 15:57:28 +00001765 } else {
1766 // Subtract by adding the negation.
1767 LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001768 addp(dst, src);
1769 j(overflow, bailout_label, near_jump);
Ben Murdoch257744e2011-11-30 15:57:28 +00001770 }
1771 }
1772}
1773
1774
1775void MacroAssembler::SmiNeg(Register dst,
1776 Register src,
1777 Label* on_smi_result,
1778 Label::Distance near_jump) {
1779 if (dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001780 DCHECK(!dst.is(kScratchRegister));
1781 movp(kScratchRegister, src);
1782 negp(dst); // Low 32 bits are retained as zero by negation.
Ben Murdoch257744e2011-11-30 15:57:28 +00001783 // Test if result is zero or Smi::kMinValue.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001784 cmpp(dst, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00001785 j(not_equal, on_smi_result, near_jump);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001786 movp(src, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00001787 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001788 movp(dst, src);
1789 negp(dst);
1790 cmpp(dst, src);
Ben Murdoch257744e2011-11-30 15:57:28 +00001791 // If the result is zero or Smi::kMinValue, negation failed to create a smi.
1792 j(not_equal, on_smi_result, near_jump);
1793 }
1794}
1795
1796
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001797template<class T>
1798static void SmiAddHelper(MacroAssembler* masm,
1799 Register dst,
1800 Register src1,
1801 T src2,
1802 Label* on_not_smi_result,
1803 Label::Distance near_jump) {
1804 if (dst.is(src1)) {
1805 Label done;
1806 masm->addp(dst, src2);
1807 masm->j(no_overflow, &done, Label::kNear);
1808 // Restore src1.
1809 masm->subp(dst, src2);
1810 masm->jmp(on_not_smi_result, near_jump);
1811 masm->bind(&done);
1812 } else {
1813 masm->movp(dst, src1);
1814 masm->addp(dst, src2);
1815 masm->j(overflow, on_not_smi_result, near_jump);
1816 }
1817}
1818
1819
Ben Murdoch257744e2011-11-30 15:57:28 +00001820void MacroAssembler::SmiAdd(Register dst,
1821 Register src1,
1822 Register src2,
1823 Label* on_not_smi_result,
1824 Label::Distance near_jump) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001825 DCHECK_NOT_NULL(on_not_smi_result);
1826 DCHECK(!dst.is(src2));
1827 SmiAddHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
Ben Murdoch257744e2011-11-30 15:57:28 +00001828}
1829
1830
1831void MacroAssembler::SmiAdd(Register dst,
1832 Register src1,
1833 const Operand& src2,
1834 Label* on_not_smi_result,
1835 Label::Distance near_jump) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001836 DCHECK_NOT_NULL(on_not_smi_result);
1837 DCHECK(!src2.AddressUsesRegister(dst));
1838 SmiAddHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
Ben Murdoch257744e2011-11-30 15:57:28 +00001839}
1840
1841
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001842void MacroAssembler::SmiAdd(Register dst,
1843 Register src1,
1844 Register src2) {
1845 // No overflow checking. Use only when it's known that
1846 // overflowing is impossible.
Steve Block44f0eee2011-05-26 01:26:41 +01001847 if (!dst.is(src1)) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001848 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001849 movp(kScratchRegister, src1);
1850 addp(kScratchRegister, src2);
1851 Check(no_overflow, kSmiAdditionOverflow);
Ben Murdoch257744e2011-11-30 15:57:28 +00001852 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001853 leap(dst, Operand(src1, src2, times_1, 0));
Ben Murdoch257744e2011-11-30 15:57:28 +00001854 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001855 addp(dst, src2);
1856 Assert(no_overflow, kSmiAdditionOverflow);
1857 }
1858}
1859
1860
1861template<class T>
1862static void SmiSubHelper(MacroAssembler* masm,
1863 Register dst,
1864 Register src1,
1865 T src2,
1866 Label* on_not_smi_result,
1867 Label::Distance near_jump) {
1868 if (dst.is(src1)) {
1869 Label done;
1870 masm->subp(dst, src2);
1871 masm->j(no_overflow, &done, Label::kNear);
1872 // Restore src1.
1873 masm->addp(dst, src2);
1874 masm->jmp(on_not_smi_result, near_jump);
1875 masm->bind(&done);
1876 } else {
1877 masm->movp(dst, src1);
1878 masm->subp(dst, src2);
1879 masm->j(overflow, on_not_smi_result, near_jump);
Steve Blocka7e24c12009-10-30 11:49:00 +00001880 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001881}
1882
1883
1884void MacroAssembler::SmiSub(Register dst,
1885 Register src1,
1886 Register src2,
1887 Label* on_not_smi_result,
1888 Label::Distance near_jump) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001889 DCHECK_NOT_NULL(on_not_smi_result);
1890 DCHECK(!dst.is(src2));
1891 SmiSubHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
Steve Blocka7e24c12009-10-30 11:49:00 +00001892}
1893
1894
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001895void MacroAssembler::SmiSub(Register dst,
Steve Blocka7e24c12009-10-30 11:49:00 +00001896 Register src1,
Ben Murdoch257744e2011-11-30 15:57:28 +00001897 const Operand& src2,
1898 Label* on_not_smi_result,
1899 Label::Distance near_jump) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001900 DCHECK_NOT_NULL(on_not_smi_result);
1901 DCHECK(!src2.AddressUsesRegister(dst));
1902 SmiSubHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
1903}
1904
1905
1906template<class T>
1907static void SmiSubNoOverflowHelper(MacroAssembler* masm,
1908 Register dst,
1909 Register src1,
1910 T src2) {
1911 // No overflow checking. Use only when it's known that
1912 // overflowing is impossible (e.g., subtracting two positive smis).
1913 if (!dst.is(src1)) {
1914 masm->movp(dst, src1);
Ben Murdoch257744e2011-11-30 15:57:28 +00001915 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001916 masm->subp(dst, src2);
1917 masm->Assert(no_overflow, kSmiSubtractionOverflow);
1918}
1919
1920
1921void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
1922 DCHECK(!dst.is(src2));
1923 SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00001924}
1925
1926
1927void MacroAssembler::SmiSub(Register dst,
1928 Register src1,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001929 const Operand& src2) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001930 SmiSubNoOverflowHelper<Operand>(this, dst, src1, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00001931}
1932
1933
Ben Murdoch257744e2011-11-30 15:57:28 +00001934void MacroAssembler::SmiMul(Register dst,
1935 Register src1,
1936 Register src2,
1937 Label* on_not_smi_result,
1938 Label::Distance near_jump) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001939 DCHECK(!dst.is(src2));
1940 DCHECK(!dst.is(kScratchRegister));
1941 DCHECK(!src1.is(kScratchRegister));
1942 DCHECK(!src2.is(kScratchRegister));
Ben Murdoch257744e2011-11-30 15:57:28 +00001943
1944 if (dst.is(src1)) {
1945 Label failure, zero_correct_result;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001946 movp(kScratchRegister, src1); // Create backup for later testing.
Ben Murdoch257744e2011-11-30 15:57:28 +00001947 SmiToInteger64(dst, src1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001948 imulp(dst, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00001949 j(overflow, &failure, Label::kNear);
1950
1951 // Check for negative zero result. If product is zero, and one
1952 // argument is negative, go to slow case.
1953 Label correct_result;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001954 testp(dst, dst);
Ben Murdoch257744e2011-11-30 15:57:28 +00001955 j(not_zero, &correct_result, Label::kNear);
1956
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001957 movp(dst, kScratchRegister);
1958 xorp(dst, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00001959 // Result was positive zero.
1960 j(positive, &zero_correct_result, Label::kNear);
1961
1962 bind(&failure); // Reused failure exit, restores src1.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001963 movp(src1, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00001964 jmp(on_not_smi_result, near_jump);
1965
1966 bind(&zero_correct_result);
1967 Set(dst, 0);
1968
1969 bind(&correct_result);
1970 } else {
1971 SmiToInteger64(dst, src1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001972 imulp(dst, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00001973 j(overflow, on_not_smi_result, near_jump);
1974 // Check for negative zero result. If product is zero, and one
1975 // argument is negative, go to slow case.
1976 Label correct_result;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001977 testp(dst, dst);
Ben Murdoch257744e2011-11-30 15:57:28 +00001978 j(not_zero, &correct_result, Label::kNear);
1979 // One of src1 and src2 is zero, the check whether the other is
1980 // negative.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001981 movp(kScratchRegister, src1);
1982 xorp(kScratchRegister, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00001983 j(negative, on_not_smi_result, near_jump);
1984 bind(&correct_result);
1985 }
1986}
1987
1988
1989void MacroAssembler::SmiDiv(Register dst,
1990 Register src1,
1991 Register src2,
1992 Label* on_not_smi_result,
1993 Label::Distance near_jump) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001994 DCHECK(!src1.is(kScratchRegister));
1995 DCHECK(!src2.is(kScratchRegister));
1996 DCHECK(!dst.is(kScratchRegister));
1997 DCHECK(!src2.is(rax));
1998 DCHECK(!src2.is(rdx));
1999 DCHECK(!src1.is(rdx));
Ben Murdoch257744e2011-11-30 15:57:28 +00002000
2001 // Check for 0 divisor (result is +/-Infinity).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002002 testp(src2, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00002003 j(zero, on_not_smi_result, near_jump);
2004
2005 if (src1.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002006 movp(kScratchRegister, src1);
Ben Murdoch257744e2011-11-30 15:57:28 +00002007 }
2008 SmiToInteger32(rax, src1);
2009 // We need to rule out dividing Smi::kMinValue by -1, since that would
2010 // overflow in idiv and raise an exception.
2011 // We combine this with negative zero test (negative zero only happens
2012 // when dividing zero by a negative number).
2013
2014 // We overshoot a little and go to slow case if we divide min-value
2015 // by any negative value, not just -1.
2016 Label safe_div;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002017 testl(rax, Immediate(~Smi::kMinValue));
Ben Murdoch257744e2011-11-30 15:57:28 +00002018 j(not_zero, &safe_div, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002019 testp(src2, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00002020 if (src1.is(rax)) {
2021 j(positive, &safe_div, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002022 movp(src1, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002023 jmp(on_not_smi_result, near_jump);
2024 } else {
2025 j(negative, on_not_smi_result, near_jump);
2026 }
2027 bind(&safe_div);
2028
2029 SmiToInteger32(src2, src2);
2030 // Sign extend src1 into edx:eax.
2031 cdq();
2032 idivl(src2);
2033 Integer32ToSmi(src2, src2);
2034 // Check that the remainder is zero.
2035 testl(rdx, rdx);
2036 if (src1.is(rax)) {
2037 Label smi_result;
2038 j(zero, &smi_result, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002039 movp(src1, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002040 jmp(on_not_smi_result, near_jump);
2041 bind(&smi_result);
2042 } else {
2043 j(not_zero, on_not_smi_result, near_jump);
2044 }
2045 if (!dst.is(src1) && src1.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002046 movp(src1, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002047 }
2048 Integer32ToSmi(dst, rax);
2049}
2050
2051
2052void MacroAssembler::SmiMod(Register dst,
2053 Register src1,
2054 Register src2,
2055 Label* on_not_smi_result,
2056 Label::Distance near_jump) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002057 DCHECK(!dst.is(kScratchRegister));
2058 DCHECK(!src1.is(kScratchRegister));
2059 DCHECK(!src2.is(kScratchRegister));
2060 DCHECK(!src2.is(rax));
2061 DCHECK(!src2.is(rdx));
2062 DCHECK(!src1.is(rdx));
2063 DCHECK(!src1.is(src2));
Ben Murdoch257744e2011-11-30 15:57:28 +00002064
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002065 testp(src2, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00002066 j(zero, on_not_smi_result, near_jump);
2067
2068 if (src1.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002069 movp(kScratchRegister, src1);
Ben Murdoch257744e2011-11-30 15:57:28 +00002070 }
2071 SmiToInteger32(rax, src1);
2072 SmiToInteger32(src2, src2);
2073
2074 // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
2075 Label safe_div;
2076 cmpl(rax, Immediate(Smi::kMinValue));
2077 j(not_equal, &safe_div, Label::kNear);
2078 cmpl(src2, Immediate(-1));
2079 j(not_equal, &safe_div, Label::kNear);
2080 // Retag inputs and go slow case.
2081 Integer32ToSmi(src2, src2);
2082 if (src1.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002083 movp(src1, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002084 }
2085 jmp(on_not_smi_result, near_jump);
2086 bind(&safe_div);
2087
2088 // Sign extend eax into edx:eax.
2089 cdq();
2090 idivl(src2);
2091 // Restore smi tags on inputs.
2092 Integer32ToSmi(src2, src2);
2093 if (src1.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002094 movp(src1, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002095 }
2096 // Check for a negative zero result. If the result is zero, and the
2097 // dividend is negative, go slow to return a floating point negative zero.
2098 Label smi_result;
2099 testl(rdx, rdx);
2100 j(not_zero, &smi_result, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002101 testp(src1, src1);
Ben Murdoch257744e2011-11-30 15:57:28 +00002102 j(negative, on_not_smi_result, near_jump);
2103 bind(&smi_result);
2104 Integer32ToSmi(dst, rdx);
2105}
2106
2107
Steve Blocka7e24c12009-10-30 11:49:00 +00002108void MacroAssembler::SmiNot(Register dst, Register src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002109 DCHECK(!dst.is(kScratchRegister));
2110 DCHECK(!src.is(kScratchRegister));
2111 if (SmiValuesAre32Bits()) {
2112 // Set tag and padding bits before negating, so that they are zero
2113 // afterwards.
2114 movl(kScratchRegister, Immediate(~0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002115 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002116 DCHECK(SmiValuesAre31Bits());
2117 movl(kScratchRegister, Immediate(1));
Steve Blocka7e24c12009-10-30 11:49:00 +00002118 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002119 if (dst.is(src)) {
2120 xorp(dst, kScratchRegister);
2121 } else {
2122 leap(dst, Operand(src, kScratchRegister, times_1, 0));
2123 }
2124 notp(dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00002125}
2126
2127
2128void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002129 DCHECK(!dst.is(src2));
Steve Blocka7e24c12009-10-30 11:49:00 +00002130 if (!dst.is(src1)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002131 movp(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002132 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002133 andp(dst, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002134}
2135
2136
Steve Block3ce2e202009-11-05 08:53:23 +00002137void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
2138 if (constant->value() == 0) {
Steve Block9fac8402011-05-12 15:51:54 +01002139 Set(dst, 0);
Steve Block3ce2e202009-11-05 08:53:23 +00002140 } else if (dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002141 DCHECK(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01002142 Register constant_reg = GetSmiConstant(constant);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002143 andp(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00002144 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01002145 LoadSmiConstant(dst, constant);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002146 andp(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00002147 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002148}
2149
2150
2151void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
2152 if (!dst.is(src1)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002153 DCHECK(!src1.is(src2));
2154 movp(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002155 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002156 orp(dst, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002157}
2158
2159
Steve Block3ce2e202009-11-05 08:53:23 +00002160void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
2161 if (dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002162 DCHECK(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01002163 Register constant_reg = GetSmiConstant(constant);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002164 orp(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00002165 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01002166 LoadSmiConstant(dst, constant);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002167 orp(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00002168 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002169}
2170
Steve Block3ce2e202009-11-05 08:53:23 +00002171
Steve Blocka7e24c12009-10-30 11:49:00 +00002172void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
2173 if (!dst.is(src1)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002174 DCHECK(!src1.is(src2));
2175 movp(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002176 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002177 xorp(dst, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002178}
2179
2180
Steve Block3ce2e202009-11-05 08:53:23 +00002181void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
2182 if (dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002183 DCHECK(!dst.is(kScratchRegister));
Steve Block8defd9f2010-07-08 12:39:36 +01002184 Register constant_reg = GetSmiConstant(constant);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002185 xorp(dst, constant_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00002186 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01002187 LoadSmiConstant(dst, constant);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002188 xorp(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00002189 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002190}
2191
2192
Steve Blocka7e24c12009-10-30 11:49:00 +00002193void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
2194 Register src,
2195 int shift_value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002196 DCHECK(is_uint5(shift_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00002197 if (shift_value > 0) {
2198 if (dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002199 sarp(dst, Immediate(shift_value + kSmiShift));
2200 shlp(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00002201 } else {
2202 UNIMPLEMENTED(); // Not used.
2203 }
2204 }
2205}
2206
2207
Steve Blocka7e24c12009-10-30 11:49:00 +00002208void MacroAssembler::SmiShiftLeftConstant(Register dst,
2209 Register src,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002210 int shift_value,
2211 Label* on_not_smi_result,
2212 Label::Distance near_jump) {
2213 if (SmiValuesAre32Bits()) {
2214 if (!dst.is(src)) {
2215 movp(dst, src);
2216 }
2217 if (shift_value > 0) {
2218 // Shift amount specified by lower 5 bits, not six as the shl opcode.
2219 shlq(dst, Immediate(shift_value & 0x1f));
2220 }
2221 } else {
2222 DCHECK(SmiValuesAre31Bits());
2223 if (dst.is(src)) {
2224 UNIMPLEMENTED(); // Not used.
2225 } else {
2226 SmiToInteger32(dst, src);
2227 shll(dst, Immediate(shift_value));
2228 JumpIfNotValidSmiValue(dst, on_not_smi_result, near_jump);
2229 Integer32ToSmi(dst, dst);
2230 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002231 }
2232}
2233
2234
Ben Murdoch257744e2011-11-30 15:57:28 +00002235void MacroAssembler::SmiShiftLogicalRightConstant(
2236 Register dst, Register src, int shift_value,
2237 Label* on_not_smi_result, Label::Distance near_jump) {
2238 // Logic right shift interprets its result as an *unsigned* number.
2239 if (dst.is(src)) {
2240 UNIMPLEMENTED(); // Not used.
2241 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00002242 if (shift_value == 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002243 testp(src, src);
Ben Murdoch257744e2011-11-30 15:57:28 +00002244 j(negative, on_not_smi_result, near_jump);
2245 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002246 if (SmiValuesAre32Bits()) {
2247 movp(dst, src);
2248 shrp(dst, Immediate(shift_value + kSmiShift));
2249 shlp(dst, Immediate(kSmiShift));
2250 } else {
2251 DCHECK(SmiValuesAre31Bits());
2252 SmiToInteger32(dst, src);
2253 shrp(dst, Immediate(shift_value));
2254 JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
2255 Integer32ToSmi(dst, dst);
2256 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002257 }
2258}
2259
2260
Steve Blocka7e24c12009-10-30 11:49:00 +00002261void MacroAssembler::SmiShiftLeft(Register dst,
2262 Register src1,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002263 Register src2,
2264 Label* on_not_smi_result,
2265 Label::Distance near_jump) {
2266 if (SmiValuesAre32Bits()) {
2267 DCHECK(!dst.is(rcx));
2268 if (!dst.is(src1)) {
2269 movp(dst, src1);
2270 }
2271 // Untag shift amount.
2272 SmiToInteger32(rcx, src2);
2273 // Shift amount specified by lower 5 bits, not six as the shl opcode.
2274 andp(rcx, Immediate(0x1f));
2275 shlq_cl(dst);
2276 } else {
2277 DCHECK(SmiValuesAre31Bits());
2278 DCHECK(!dst.is(kScratchRegister));
2279 DCHECK(!src1.is(kScratchRegister));
2280 DCHECK(!src2.is(kScratchRegister));
2281 DCHECK(!dst.is(src2));
2282 DCHECK(!dst.is(rcx));
2283
2284 if (src1.is(rcx) || src2.is(rcx)) {
2285 movq(kScratchRegister, rcx);
2286 }
2287 if (dst.is(src1)) {
2288 UNIMPLEMENTED(); // Not used.
2289 } else {
2290 Label valid_result;
2291 SmiToInteger32(dst, src1);
2292 SmiToInteger32(rcx, src2);
2293 shll_cl(dst);
2294 JumpIfValidSmiValue(dst, &valid_result, Label::kNear);
2295 // As src1 or src2 could not be dst, we do not need to restore them for
2296 // clobbering dst.
2297 if (src1.is(rcx) || src2.is(rcx)) {
2298 if (src1.is(rcx)) {
2299 movq(src1, kScratchRegister);
2300 } else {
2301 movq(src2, kScratchRegister);
2302 }
2303 }
2304 jmp(on_not_smi_result, near_jump);
2305 bind(&valid_result);
2306 Integer32ToSmi(dst, dst);
2307 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002308 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002309}
2310
2311
Ben Murdoch257744e2011-11-30 15:57:28 +00002312void MacroAssembler::SmiShiftLogicalRight(Register dst,
2313 Register src1,
2314 Register src2,
2315 Label* on_not_smi_result,
2316 Label::Distance near_jump) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002317 DCHECK(!dst.is(kScratchRegister));
2318 DCHECK(!src1.is(kScratchRegister));
2319 DCHECK(!src2.is(kScratchRegister));
2320 DCHECK(!dst.is(src2));
2321 DCHECK(!dst.is(rcx));
Ben Murdoch257744e2011-11-30 15:57:28 +00002322 if (src1.is(rcx) || src2.is(rcx)) {
2323 movq(kScratchRegister, rcx);
2324 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002325 if (dst.is(src1)) {
2326 UNIMPLEMENTED(); // Not used.
Ben Murdoch257744e2011-11-30 15:57:28 +00002327 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002328 Label valid_result;
2329 SmiToInteger32(dst, src1);
2330 SmiToInteger32(rcx, src2);
2331 shrl_cl(dst);
2332 JumpIfUIntValidSmiValue(dst, &valid_result, Label::kNear);
2333 // As src1 or src2 could not be dst, we do not need to restore them for
2334 // clobbering dst.
2335 if (src1.is(rcx) || src2.is(rcx)) {
2336 if (src1.is(rcx)) {
2337 movq(src1, kScratchRegister);
2338 } else {
2339 movq(src2, kScratchRegister);
2340 }
2341 }
2342 jmp(on_not_smi_result, near_jump);
2343 bind(&valid_result);
2344 Integer32ToSmi(dst, dst);
Ben Murdoch257744e2011-11-30 15:57:28 +00002345 }
2346}
2347
2348
// Shifts the smi in src1 arithmetically right by the (smi) amount in src2
// and stores the smi result in dst.  An arithmetic right shift of a valid
// smi can never leave the smi range, so no bailout label is needed.
// Clobbers rcx (the hardware shift-count register); dst must not be rcx.
void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(rcx));

  SmiToInteger32(rcx, src2);
  if (!dst.is(src1)) {
    movp(dst, src1);
  }
  SmiToInteger32(dst, dst);
  sarl_cl(dst);
  Integer32ToSmi(dst, dst);
}
2365
2366
// Sets dst to whichever of src1/src2 is not a smi.  The caller guarantees
// that at least one operand is a smi (checked in debug builds); if neither
// is a smi, control jumps to on_not_smis.  Uses a branch-free mask trick:
// dst = src1 ^ ((src1 ^ src2) & mask), where mask is all ones exactly when
// src1 is a smi.  Clobbers kScratchRegister.
void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(src1));
  DCHECK(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
  Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
#endif
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  andp(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // Smis have a zero tag bit, so a non-zero result means both tag bits are
  // set, i.e. NEITHER operand is a smi.
  j(not_zero, on_not_smis, near_jump);

  // Exactly one operand is a smi.
  DCHECK_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subp(kScratchRegister, Immediate(1));
  // If src1 is a smi, then scratch register all 1s, else it is all 0s.
  movp(dst, src1);
  xorp(dst, src2);
  andp(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xorp(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
2402
2403
// Converts the smi in src into an index usable in a memory operand, applying
// an additional scale of 2^shift, and returns the (register, scale) pair.
// With 32-bit smis the scale is folded into the untagging shift; with 31-bit
// smis a hardware ScaleFactor is returned for shifts that fit one.
SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  if (SmiValuesAre32Bits()) {
    DCHECK(is_uint6(shift));
    // There is a possible optimization if shift is in the range 60-63, but that
    // will (and must) never happen.
    if (!dst.is(src)) {
      movp(dst, src);
    }
    // Combine untagging (>> kSmiShift) with the requested scaling (<< shift)
    // into a single shift in the appropriate direction.
    if (shift < kSmiShift) {
      sarp(dst, Immediate(kSmiShift - shift));
    } else {
      shlp(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    // We have to sign extend the index register to 64-bit as the SMI might
    // be negative.
    movsxlq(dst, dst);
    if (shift == times_1) {
      sarq(dst, Immediate(kSmiShift));
      return SmiIndex(dst, times_1);
    }
    // Leave the tag bit in place and let the hardware scale absorb one
    // factor of two (shift - 1).
    return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
  }
}
2436
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002437
// Like SmiToIndex, but negates the (positive) smi in src first, producing an
// index for addressing that counts backwards.
SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  if (SmiValuesAre32Bits()) {
    // Register src holds a positive smi.
    DCHECK(is_uint6(shift));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    negp(dst);
    // Fold untagging and scaling into one shift, as in SmiToIndex.
    if (shift < kSmiShift) {
      sarp(dst, Immediate(kSmiShift - shift));
    } else {
      shlp(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    negq(dst);
    if (shift == times_1) {
      sarq(dst, Immediate(kSmiShift));
      return SmiIndex(dst, times_1);
    }
    return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
  }
}
2468
2469
// Adds the value of the smi stored at memory operand |src| to the 32-bit
// integer in dst.  With 32-bit smis the payload lives in the upper half of
// the tagged word and can be read directly as a 32-bit load; with 31-bit
// smis the field is untagged through kScratchRegister first.
void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    DCHECK_EQ(0, kSmiShift % kBitsPerByte);
    addl(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    SmiToInteger32(kScratchRegister, src);
    addl(dst, kScratchRegister);
  }
}
2480
2481
2482void MacroAssembler::Push(Smi* source) {
2483 intptr_t smi = reinterpret_cast<intptr_t>(source);
2484 if (is_int32(smi)) {
2485 Push(Immediate(static_cast<int32_t>(smi)));
2486 } else {
2487 Register constant = GetSmiConstant(source);
2488 Push(constant);
2489 }
2490}
2491
2492
// Pushes the raw 64-bit value in src as two smis (high half first, then low
// half), so that the words on the stack are GC-safe tagged values.  Clobbers
// both src and scratch; reverse with PopRegisterAsTwoSmis.
void MacroAssembler::PushRegisterAsTwoSmis(Register src, Register scratch) {
  DCHECK(!src.is(scratch));
  movp(scratch, src);
  // High bits.
  shrp(src, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
  shlp(src, Immediate(kSmiShift));
  Push(src);
  // Low bits.
  shlp(scratch, Immediate(kSmiShift));
  Push(scratch);
}
2504
2505
// Pops two smis (pushed by PushRegisterAsTwoSmis) and reassembles the
// original 64-bit value into dst.  Clobbers scratch.
void MacroAssembler::PopRegisterAsTwoSmis(Register dst, Register scratch) {
  DCHECK(!dst.is(scratch));
  Pop(scratch);
  // Low bits.
  shrp(scratch, Immediate(kSmiShift));
  Pop(dst);
  shrp(dst, Immediate(kSmiShift));
  // High bits.
  shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
  orp(dst, scratch);
}
2517
2518
// Emits a test of the smi field at |src| against the smi constant |source|
// without untagging.  With 32-bit smis only the upper 32 bits (the payload)
// are tested; with 31-bit smis the whole tagged word is tested.
void MacroAssembler::Test(const Operand& src, Smi* source) {
  if (SmiValuesAre32Bits()) {
    testl(Operand(src, kIntSize), Immediate(source->value()));
  } else {
    DCHECK(SmiValuesAre31Bits());
    testl(src, Immediate(source));
  }
}
2527
2528
2529// ----------------------------------------------------------------------------
2530
2531
// Jumps to not_string unless |object| is a heap string.  Smis are rejected
// first; otherwise the map is loaded into object_map and the instance type
// compared against FIRST_NONSTRING_TYPE.
void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     Label* not_string,
                                     Label::Distance near_jump) {
  Condition is_smi = CheckSmi(object);
  j(is_smi, not_string, near_jump);
  CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
  j(above_equal, not_string, near_jump);
}
2541
2542
// Jumps to on_fail unless both objects are flat (sequential) one-byte heap
// strings.  Both instance-type masks are combined with a lea so a single
// compare checks the two strings at once.  Clobbers scratch1 and scratch2.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(
    Register first_object, Register second_object, Register scratch1,
    Register scratch2, Label* on_fail, Label::Distance near_jump) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);

  // Load instance type for both strings.
  movp(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movp(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  DCHECK(kNotStringTag != 0);
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;

  andl(scratch1, Immediate(kFlatOneByteStringMask));
  andl(scratch2, Immediate(kFlatOneByteStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  // scratch1 + scratch2 * 8: the masks don't overlap when shifted by 3.
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
2572
2573
// Jumps to failure unless the instance type in |instance_type| describes a
// flat (sequential) one-byte string.  Clobbers scratch (which may alias
// instance_type).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatOneByteStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kOneByteStringTag));
  j(not_equal, failure, near_jump);
}
2588
2589
// Jumps to on_fail unless both given instance types describe flat
// (sequential) one-byte strings.  Same interleaved single-compare trick as
// JumpIfNotBothSequentialOneByteStrings.  Clobbers scratch1 and scratch2.
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first_object_instance_type, Register second_object_instance_type,
    Register scratch1, Register scratch2, Label* on_fail,
    Label::Distance near_jump) {
  // Load instance type for both strings.
  movp(scratch1, first_object_instance_type);
  movp(scratch2, second_object_instance_type);

  // Check that both are flat one-byte strings.
  DCHECK(kNotStringTag != 0);
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;

  andl(scratch1, Immediate(kFlatOneByteStringMask));
  andl(scratch2, Immediate(kFlatOneByteStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
2614
2615
// Shared implementation for the two JumpIfNotUniqueNameInstanceType
// overloads.  |operand_or_register| (a Register or an Operand) holds an
// instance type; jumps to not_unique_name unless it is an internalized
// string or a symbol.
template<class T>
static void JumpIfNotUniqueNameHelper(MacroAssembler* masm,
                                      T operand_or_register,
                                      Label* not_unique_name,
                                      Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  // Zero here means: is a string AND is internalized.
  masm->testb(operand_or_register,
              Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  masm->j(zero, &succeed, Label::kNear);
  // Otherwise only a symbol qualifies as a unique name.
  masm->cmpb(operand_or_register, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
  masm->j(not_equal, not_unique_name, distance);

  masm->bind(&succeed);
}
2631
2632
// Operand overload: jumps to not_unique_name unless the instance type stored
// at |operand| is an internalized string or a symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Operand>(this, operand, not_unique_name, distance);
}
2638
2639
// Register overload: jumps to not_unique_name unless the instance type in
// |reg| is an internalized string or a symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Register>(this, reg, not_unique_name, distance);
}
2645
Steve Block44f0eee2011-05-26 01:26:41 +01002646
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002647void MacroAssembler::Move(Register dst, Register src) {
2648 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002649 movp(dst, src);
Steve Block6ded16b2010-05-10 14:33:55 +01002650 }
Steve Block6ded16b2010-05-10 14:33:55 +01002651}
2652
2653
Steve Blocka7e24c12009-10-30 11:49:00 +00002654void MacroAssembler::Move(Register dst, Handle<Object> source) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002655 AllowDeferredHandleDereference smi_check;
Steve Blocka7e24c12009-10-30 11:49:00 +00002656 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002657 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002658 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002659 MoveHeapObject(dst, source);
Steve Blocka7e24c12009-10-30 11:49:00 +00002660 }
2661}
2662
2663
2664void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002665 AllowDeferredHandleDereference smi_check;
Steve Blocka7e24c12009-10-30 11:49:00 +00002666 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002667 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002668 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002669 MoveHeapObject(kScratchRegister, source);
2670 movp(dst, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00002671 }
2672}
2673
2674
// Materializes the 32-bit constant |src| in XMM register dst.  Zero is
// produced with a register-clearing xor, all-ones with pcmpeqd; any other
// value is routed through kScratchRegister.
void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
  if (src == 0) {
    Xorpd(dst, dst);
  } else {
    unsigned pop = base::bits::CountPopulation32(src);
    DCHECK_NE(0u, pop);
    if (pop == 32) {
      // All bits set: pcmpeqd of a register with itself yields all ones.
      Pcmpeqd(dst, dst);
    } else {
      movl(kScratchRegister, Immediate(src));
      Movq(dst, kScratchRegister);
    }
  }
}
2689
2690
// Materializes the 64-bit constant |src| in XMM register dst, preferring
// short instruction sequences: xor for zero, pcmpeqd (+ shift) for all-ones
// runs anchored at either end of the word, the 32-bit path when the upper
// half is zero, and a 64-bit scratch-register move otherwise.
void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
  if (src == 0) {
    Xorpd(dst, dst);
  } else {
    unsigned nlz = base::bits::CountLeadingZeros64(src);
    unsigned ntz = base::bits::CountTrailingZeros64(src);
    unsigned pop = base::bits::CountPopulation64(src);
    DCHECK_NE(0u, pop);
    if (pop == 64) {
      Pcmpeqd(dst, dst);
    } else if (pop + ntz == 64) {
      // Pattern 1...10...0: all ones shifted left by the trailing-zero count.
      Pcmpeqd(dst, dst);
      Psllq(dst, ntz);
    } else if (pop + nlz == 64) {
      // Pattern 0...01...1: all ones shifted right by the leading-zero count.
      Pcmpeqd(dst, dst);
      Psrlq(dst, nlz);
    } else {
      uint32_t lower = static_cast<uint32_t>(src);
      uint32_t upper = static_cast<uint32_t>(src >> 32);
      if (upper == 0) {
        Move(dst, lower);
      } else {
        movq(kScratchRegister, src);
        Movq(dst, kScratchRegister);
      }
    }
  }
}
2719
2720
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002721void MacroAssembler::Movaps(XMMRegister dst, XMMRegister src) {
2722 if (CpuFeatures::IsSupported(AVX)) {
2723 CpuFeatureScope scope(this, AVX);
2724 vmovaps(dst, src);
2725 } else {
2726 movaps(dst, src);
2727 }
2728}
2729
2730
2731void MacroAssembler::Movapd(XMMRegister dst, XMMRegister src) {
2732 if (CpuFeatures::IsSupported(AVX)) {
2733 CpuFeatureScope scope(this, AVX);
2734 vmovapd(dst, src);
2735 } else {
2736 movapd(dst, src);
2737 }
2738}
2739
2740
2741void MacroAssembler::Movsd(XMMRegister dst, XMMRegister src) {
2742 if (CpuFeatures::IsSupported(AVX)) {
2743 CpuFeatureScope scope(this, AVX);
2744 vmovsd(dst, dst, src);
2745 } else {
2746 movsd(dst, src);
2747 }
2748}
2749
2750
2751void MacroAssembler::Movsd(XMMRegister dst, const Operand& src) {
2752 if (CpuFeatures::IsSupported(AVX)) {
2753 CpuFeatureScope scope(this, AVX);
2754 vmovsd(dst, src);
2755 } else {
2756 movsd(dst, src);
2757 }
2758}
2759
2760
2761void MacroAssembler::Movsd(const Operand& dst, XMMRegister src) {
2762 if (CpuFeatures::IsSupported(AVX)) {
2763 CpuFeatureScope scope(this, AVX);
2764 vmovsd(dst, src);
2765 } else {
2766 movsd(dst, src);
2767 }
2768}
2769
2770
2771void MacroAssembler::Movss(XMMRegister dst, XMMRegister src) {
2772 if (CpuFeatures::IsSupported(AVX)) {
2773 CpuFeatureScope scope(this, AVX);
2774 vmovss(dst, dst, src);
2775 } else {
2776 movss(dst, src);
2777 }
2778}
2779
2780
2781void MacroAssembler::Movss(XMMRegister dst, const Operand& src) {
2782 if (CpuFeatures::IsSupported(AVX)) {
2783 CpuFeatureScope scope(this, AVX);
2784 vmovss(dst, src);
2785 } else {
2786 movss(dst, src);
2787 }
2788}
2789
2790
2791void MacroAssembler::Movss(const Operand& dst, XMMRegister src) {
2792 if (CpuFeatures::IsSupported(AVX)) {
2793 CpuFeatureScope scope(this, AVX);
2794 vmovss(dst, src);
2795 } else {
2796 movss(dst, src);
2797 }
2798}
2799
2800
2801void MacroAssembler::Movd(XMMRegister dst, Register src) {
2802 if (CpuFeatures::IsSupported(AVX)) {
2803 CpuFeatureScope scope(this, AVX);
2804 vmovd(dst, src);
2805 } else {
2806 movd(dst, src);
2807 }
2808}
2809
2810
2811void MacroAssembler::Movd(XMMRegister dst, const Operand& src) {
2812 if (CpuFeatures::IsSupported(AVX)) {
2813 CpuFeatureScope scope(this, AVX);
2814 vmovd(dst, src);
2815 } else {
2816 movd(dst, src);
2817 }
2818}
2819
2820
2821void MacroAssembler::Movd(Register dst, XMMRegister src) {
2822 if (CpuFeatures::IsSupported(AVX)) {
2823 CpuFeatureScope scope(this, AVX);
2824 vmovd(dst, src);
2825 } else {
2826 movd(dst, src);
2827 }
2828}
2829
2830
2831void MacroAssembler::Movq(XMMRegister dst, Register src) {
2832 if (CpuFeatures::IsSupported(AVX)) {
2833 CpuFeatureScope scope(this, AVX);
2834 vmovq(dst, src);
2835 } else {
2836 movq(dst, src);
2837 }
2838}
2839
2840
2841void MacroAssembler::Movq(Register dst, XMMRegister src) {
2842 if (CpuFeatures::IsSupported(AVX)) {
2843 CpuFeatureScope scope(this, AVX);
2844 vmovq(dst, src);
2845 } else {
2846 movq(dst, src);
2847 }
2848}
2849
2850
2851void MacroAssembler::Movmskpd(Register dst, XMMRegister src) {
2852 if (CpuFeatures::IsSupported(AVX)) {
2853 CpuFeatureScope scope(this, AVX);
2854 vmovmskpd(dst, src);
2855 } else {
2856 movmskpd(dst, src);
2857 }
2858}
2859
2860
2861void MacroAssembler::Roundss(XMMRegister dst, XMMRegister src,
2862 RoundingMode mode) {
2863 if (CpuFeatures::IsSupported(AVX)) {
2864 CpuFeatureScope scope(this, AVX);
2865 vroundss(dst, dst, src, mode);
2866 } else {
2867 roundss(dst, src, mode);
2868 }
2869}
2870
2871
2872void MacroAssembler::Roundsd(XMMRegister dst, XMMRegister src,
2873 RoundingMode mode) {
2874 if (CpuFeatures::IsSupported(AVX)) {
2875 CpuFeatureScope scope(this, AVX);
2876 vroundsd(dst, dst, src, mode);
2877 } else {
2878 roundsd(dst, src, mode);
2879 }
2880}
2881
2882
2883void MacroAssembler::Sqrtsd(XMMRegister dst, XMMRegister src) {
2884 if (CpuFeatures::IsSupported(AVX)) {
2885 CpuFeatureScope scope(this, AVX);
2886 vsqrtsd(dst, dst, src);
2887 } else {
2888 sqrtsd(dst, src);
2889 }
2890}
2891
2892
2893void MacroAssembler::Sqrtsd(XMMRegister dst, const Operand& src) {
2894 if (CpuFeatures::IsSupported(AVX)) {
2895 CpuFeatureScope scope(this, AVX);
2896 vsqrtsd(dst, dst, src);
2897 } else {
2898 sqrtsd(dst, src);
2899 }
2900}
2901
2902
2903void MacroAssembler::Ucomiss(XMMRegister src1, XMMRegister src2) {
2904 if (CpuFeatures::IsSupported(AVX)) {
2905 CpuFeatureScope scope(this, AVX);
2906 vucomiss(src1, src2);
2907 } else {
2908 ucomiss(src1, src2);
2909 }
2910}
2911
2912
2913void MacroAssembler::Ucomiss(XMMRegister src1, const Operand& src2) {
2914 if (CpuFeatures::IsSupported(AVX)) {
2915 CpuFeatureScope scope(this, AVX);
2916 vucomiss(src1, src2);
2917 } else {
2918 ucomiss(src1, src2);
2919 }
2920}
2921
2922
2923void MacroAssembler::Ucomisd(XMMRegister src1, XMMRegister src2) {
2924 if (CpuFeatures::IsSupported(AVX)) {
2925 CpuFeatureScope scope(this, AVX);
2926 vucomisd(src1, src2);
2927 } else {
2928 ucomisd(src1, src2);
2929 }
2930}
2931
2932
2933void MacroAssembler::Ucomisd(XMMRegister src1, const Operand& src2) {
2934 if (CpuFeatures::IsSupported(AVX)) {
2935 CpuFeatureScope scope(this, AVX);
2936 vucomisd(src1, src2);
2937 } else {
2938 ucomisd(src1, src2);
2939 }
2940}
2941
2942
Steve Blocka7e24c12009-10-30 11:49:00 +00002943void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002944 AllowDeferredHandleDereference smi_check;
Steve Block3ce2e202009-11-05 08:53:23 +00002945 if (source->IsSmi()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002946 Cmp(dst, Smi::cast(*source));
Steve Block3ce2e202009-11-05 08:53:23 +00002947 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002948 MoveHeapObject(kScratchRegister, source);
2949 cmpp(dst, kScratchRegister);
Steve Block3ce2e202009-11-05 08:53:23 +00002950 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002951}
2952
2953
2954void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002955 AllowDeferredHandleDereference smi_check;
Steve Blocka7e24c12009-10-30 11:49:00 +00002956 if (source->IsSmi()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002957 Cmp(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002958 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002959 MoveHeapObject(kScratchRegister, source);
2960 cmpp(dst, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00002961 }
2962}
2963
2964
2965void MacroAssembler::Push(Handle<Object> source) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002966 AllowDeferredHandleDereference smi_check;
Steve Blocka7e24c12009-10-30 11:49:00 +00002967 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002968 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002969 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002970 MoveHeapObject(kScratchRegister, source);
2971 Push(kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00002972 }
2973}
2974
2975
// Loads the heap object referenced by |object| into |result|.  Objects in
// new space may move during GC, so they are referenced indirectly through a
// cell (the cell itself lives in old space and is updated by the GC);
// old-space objects are embedded directly with relocation info.
void MacroAssembler::MoveHeapObject(Register result,
                                    Handle<Object> object) {
  AllowDeferredHandleDereference using_raw_address;
  DCHECK(object->IsHeapObject());
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    Move(result, cell, RelocInfo::CELL);
    movp(result, Operand(result, 0));
  } else {
    Move(result, object, RelocInfo::EMBEDDED_OBJECT);
  }
}
2988
2989
// Loads the value stored in the global |cell| into dst.  When dst is rax
// the shorter load_rax encoding (direct absolute load) is used.
void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) {
  if (dst.is(rax)) {
    AllowDeferredHandleDereference embedding_raw_address;
    load_rax(cell.location(), RelocInfo::CELL);
  } else {
    Move(dst, cell, RelocInfo::CELL);
    movp(dst, Operand(dst, 0));
  }
}
2999
3000
// Compares |value| with the payload of the weak cell.  Clobbers scratch,
// which holds the cell address.
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  Move(scratch, cell, RelocInfo::EMBEDDED_OBJECT);
  cmpp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
3006
3007
// Loads the payload of the weak cell into |value| (no cleared-cell check;
// see LoadWeakValue for the checking variant).
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  Move(value, cell, RelocInfo::EMBEDDED_OBJECT);
  movp(value, FieldOperand(value, WeakCell::kValueOffset));
}
3012
3013
// Loads the payload of the weak cell into |value| and jumps to |miss| if it
// is a smi — a smi payload indicates the cell was cleared (its referent was
// collected); NOTE(review): cleared-cell == smi is inferred from the
// JumpIfSmi check here — confirm against WeakCell definition.
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
3019
3020
Leon Clarkee46be812010-01-19 14:06:41 +00003021void MacroAssembler::Drop(int stack_elements) {
3022 if (stack_elements > 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003023 addp(rsp, Immediate(stack_elements * kPointerSize));
Leon Clarkee46be812010-01-19 14:06:41 +00003024 }
3025}
3026
3027
// Removes |stack_elements| slots located below the return address, keeping
// the return address on top of the stack.  On x64 a single element can be
// dropped with one instruction by popping the return address into the slot
// it is about to occupy; otherwise the return address is moved aside.
void MacroAssembler::DropUnderReturnAddress(int stack_elements,
                                            Register scratch) {
  DCHECK(stack_elements > 0);
  if (kPointerSize == kInt64Size && stack_elements == 1) {
    popq(MemOperand(rsp, 0));
    return;
  }

  PopReturnAddressTo(scratch);
  Drop(stack_elements);
  PushReturnAddressFrom(scratch);
}
3040
3041
// Pointer-sized push.  On x64 this is a plain pushq; on x32 the stack
// pointer is adjusted by 4 and the value stored explicitly, since pushq
// would move the stack by 8.
void MacroAssembler::Push(Register src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    // x32 uses 64-bit push for rbp in the prologue.
    DCHECK(src.code() != rbp.code());
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), src);
  }
}
3052
3053
// Pointer-sized push of a memory operand.  The x32 path stages the value in
// kScratchRegister because the source operand may be rsp-relative and must
// be read before rsp is adjusted.
void MacroAssembler::Push(const Operand& src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    movp(kScratchRegister, src);
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), kScratchRegister);
  }
}
3063
3064
// Pushes a full 64-bit quadword regardless of pointer size.  On x32 the
// value is staged in kScratchRegister so a 64-bit pushq can be used.
void MacroAssembler::PushQuad(const Operand& src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    movp(kScratchRegister, src);
    pushq(kScratchRegister);
  }
}
3073
3074
// Pointer-sized push of an immediate; on x32 the stack is adjusted by 4 and
// the immediate stored explicitly.
void MacroAssembler::Push(Immediate value) {
  if (kPointerSize == kInt64Size) {
    pushq(value);
  } else {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), value);
  }
}
3083
3084
// Pushes a raw 32-bit immediate as a pointer-sized stack slot.
void MacroAssembler::PushImm32(int32_t imm32) {
  if (kPointerSize == kInt64Size) {
    pushq_imm32(imm32);
  } else {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), Immediate(imm32));
  }
}
3093
3094
// Pointer-sized pop.  On x32 the value is loaded before rsp is adjusted by
// 4, since popq would move the stack by 8.
void MacroAssembler::Pop(Register dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    // x32 uses 64-bit pop for rbp in the epilogue.
    DCHECK(dst.code() != rbp.code());
    movp(dst, Operand(rsp, 0));
    leal(rsp, Operand(rsp, 4));
  }
}
3105
3106
// Pointer-sized pop into a memory operand.  The x32 path needs a scratch
// register; if the destination operand itself addresses through
// kScratchRegister, kRootRegister is borrowed instead and reinitialized
// afterwards.
void MacroAssembler::Pop(const Operand& dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    Register scratch = dst.AddressUsesRegister(kScratchRegister)
        ? kRootRegister : kScratchRegister;
    movp(scratch, Operand(rsp, 0));
    movp(dst, scratch);
    leal(rsp, Operand(rsp, 4));
    if (scratch.is(kRootRegister)) {
      // Restore kRootRegister.
      InitializeRootRegister();
    }
  }
}
3122
3123
// Pops a full 64-bit quadword regardless of pointer size; the x32 path
// stages it in kScratchRegister.
void MacroAssembler::PopQuad(const Operand& dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    popq(kScratchRegister);
    movp(dst, kScratchRegister);
  }
}
3132
3133
// Loads one of the SharedFunctionInfo 32-bit pseudo-smi fields (stored past
// kLengthOffset) as a sign-extended integer into dst.  On x64 the field is a
// plain int32; on x32 it is a tagged smi and must be untagged.
void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst,
                                                        Register base,
                                                        int offset) {
  DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
         offset <= SharedFunctionInfo::kSize &&
         (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
  if (kPointerSize == kInt64Size) {
    movsxlq(dst, FieldOperand(base, offset));
  } else {
    movp(dst, FieldOperand(base, offset));
    SmiToInteger32(dst, dst);
  }
}
3147
3148
// Tests bit number |bits| of a SharedFunctionInfo 32-bit pseudo-smi field.
// On x32 the field is smi-tagged, so the bit index is shifted up by
// kSmiShift before the byte containing it is located.
void MacroAssembler::TestBitSharedFunctionInfoSpecialField(Register base,
                                                           int offset,
                                                           int bits) {
  DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
         offset <= SharedFunctionInfo::kSize &&
         (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
  if (kPointerSize == kInt32Size) {
    // On x32, this field is represented by SMI.
    bits += kSmiShift;
  }
  int byte_offset = bits / kBitsPerByte;
  int bit_in_byte = bits & (kBitsPerByte - 1);
  testb(FieldOperand(base, offset + byte_offset), Immediate(1 << bit_in_byte));
}
3163
3164
// Jumps to the external (C++) address: materializes it in kScratchRegister,
// then jumps through the register.
void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}
3169
3170
// Indirect jump through a memory operand.  The x32 path loads the pointer
// into kScratchRegister first, since the memory form would read 64 bits.
void MacroAssembler::Jump(const Operand& op) {
  if (kPointerSize == kInt64Size) {
    jmp(op);
  } else {
    movp(kScratchRegister, op);
    jmp(kScratchRegister);
  }
}
3179
3180
// Jumps to an absolute address with the given relocation mode, via
// kScratchRegister.
void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  Move(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}
3185
3186
// Jumps to a code object (relocated reference emitted by the assembler).
void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}
3191
3192
// Returns the exact byte size of the code emitted by Call(ExternalReference):
// the address load plus the call-through-scratch-register instruction.
int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
  return LoadAddressSize(ext) +
         Assembler::kCallScratchRegisterInstructionLength;
}
3198
3199
// Calls the external (C++) address through kScratchRegister.  Debug builds
// verify that the emitted size matches CallSize(ext), which callers rely on
// for patching.
void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}
3210
3211
// Indirect call through a memory operand.  Goes through kScratchRegister on
// x32 (memory form would read 64 bits) and on Atom CPUs — presumably a
// register-indirect call is faster there; confirm against the Atom tuning
// notes.
void MacroAssembler::Call(const Operand& op) {
  if (kPointerSize == kInt64Size && !CpuFeatures::IsSupported(ATOM)) {
    call(op);
  } else {
    movp(kScratchRegister, op);
    call(kScratchRegister);
  }
}
3220
3221
// Call an absolute address, materialized (with relocation) into
// kScratchRegister. Debug builds verify the emitted size.
void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(destination);
#endif
  Move(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(pc_offset(), end_position);
#endif
}
3232
3233
// Emit a relocated call to a code object, optionally tagging the call
// site with an AST id. Debug builds verify the emitted size against
// CallSize(code_object).
void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  DCHECK(RelocInfo::IsCodeTarget(rmode) ||
         rmode == RelocInfo::CODE_AGE_SEQUENCE);
  call(code_object, rmode, ast_id);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}
3247
3248
// Extract 32-bit lane |imm8| (0 or 1 only) of |src| into |dst|.
// Without SSE4.1, lane 1 is obtained by moving the low 64 bits and
// shifting the high half down.
void MacroAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
  if (imm8 == 0) {
    Movd(dst, src);
    return;
  }
  DCHECK_EQ(1, imm8);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pextrd(dst, src, imm8);
    return;
  }
  movq(dst, src);
  shrq(dst, Immediate(32));
}
3263
3264
// Insert |src| into 32-bit lane |imm8| (0 or 1 only) of |dst|.
// The non-SSE4.1 fallback clobbers xmm0.
void MacroAssembler::Pinsrd(XMMRegister dst, Register src, int8_t imm8) {
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pinsrd(dst, src, imm8);
    return;
  }
  Movd(xmm0, src);
  if (imm8 == 1) {
    // Interleave: dst lane 0 stays, src value becomes lane 1.
    punpckldq(dst, xmm0);
  } else {
    DCHECK_EQ(0, imm8);
    Movss(dst, xmm0);
  }
}
3279
3280
// Memory-operand variant of Pinsrd above; same contract, and the
// non-SSE4.1 fallback likewise clobbers xmm0.
void MacroAssembler::Pinsrd(XMMRegister dst, const Operand& src, int8_t imm8) {
  DCHECK(imm8 == 0 || imm8 == 1);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pinsrd(dst, src, imm8);
    return;
  }
  Movd(xmm0, src);
  if (imm8 == 1) {
    punpckldq(dst, xmm0);
  } else {
    DCHECK_EQ(0, imm8);
    Movss(dst, xmm0);
  }
}
3296
3297
// 32-bit count-leading-zeros. Uses lzcntl when available; otherwise
// emulates it with bsrl (whose dst is undefined for src == 0, hence the
// explicit zero-input path).
void MacroAssembler::Lzcntl(Register dst, Register src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcntl(dst, src);
    return;
  }
  Label not_zero_src;
  bsrl(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 63);  // 63^31 == 32
  bind(&not_zero_src);
  xorl(dst, Immediate(31));  // for x in [0..31], 31^x == 31 - x
}
3311
3312
// Memory-operand variant of Lzcntl above; same emulation when the LZCNT
// instruction is unavailable.
void MacroAssembler::Lzcntl(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcntl(dst, src);
    return;
  }
  Label not_zero_src;
  bsrl(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 63);  // 63^31 == 32
  bind(&not_zero_src);
  xorl(dst, Immediate(31));  // for x in [0..31], 31^x == 31 - x
}
3326
3327
// 64-bit count-leading-zeros. Uses lzcntq when available; otherwise
// emulates it with bsrq (dst undefined for src == 0, hence the explicit
// zero-input path).
void MacroAssembler::Lzcntq(Register dst, Register src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsrq(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 127);  // 127^63 == 64
  bind(&not_zero_src);
  xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
}
3341
3342
// Memory-operand variant of Lzcntq above; same emulation when the LZCNT
// instruction is unavailable.
void MacroAssembler::Lzcntq(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsrq(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 127);  // 127^63 == 64
  bind(&not_zero_src);
  xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
}
3356
3357
// 64-bit count-trailing-zeros. Uses tzcntq (BMI1) when available;
// otherwise emulates it with bsfq.
void MacroAssembler::Tzcntq(Register dst, Register src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsfq(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
  Set(dst, 64);
  bind(&not_zero_src);
}
3371
3372
// Memory-operand variant of Tzcntq above.
void MacroAssembler::Tzcntq(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsfq(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
  Set(dst, 64);
  bind(&not_zero_src);
}
3386
3387
// 32-bit count-trailing-zeros. Uses tzcntl (BMI1) when available;
// otherwise emulates it with bsfl (undefined for src == 0).
void MacroAssembler::Tzcntl(Register dst, Register src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcntl(dst, src);
    return;
  }
  Label not_zero_src;
  bsfl(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 32);  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}
3400
3401
// Memory-operand variant of Tzcntl above.
void MacroAssembler::Tzcntl(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcntl(dst, src);
    return;
  }
  Label not_zero_src;
  bsfl(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 32);  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}
3414
3415
// 32-bit population count. There is no software fallback: callers must
// only reach this when CpuFeatures::IsSupported(POPCNT) holds.
void MacroAssembler::Popcntl(Register dst, Register src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcntl(dst, src);
    return;
  }
  UNREACHABLE();
}
3424
3425
// Memory-operand variant of Popcntl; POPCNT support is required.
void MacroAssembler::Popcntl(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcntl(dst, src);
    return;
  }
  UNREACHABLE();
}
3434
3435
// 64-bit population count; POPCNT support is required (no fallback).
void MacroAssembler::Popcntq(Register dst, Register src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcntq(dst, src);
    return;
  }
  UNREACHABLE();
}
3444
3445
// Memory-operand variant of Popcntq; POPCNT support is required.
void MacroAssembler::Popcntq(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcntq(dst, src);
    return;
  }
  UNREACHABLE();
}
3454
3455
// Push the 12 safepoint-saved general registers and reserve stack space
// so the frame covers kNumSafepointRegisters slots. Slot order must stay
// in sync with kSafepointPushRegisterIndices below.
void MacroAssembler::Pushad() {
  Push(rax);
  Push(rcx);
  Push(rdx);
  Push(rbx);
  // Not pushing rsp or rbp.
  Push(rsi);
  Push(rdi);
  Push(r8);
  Push(r9);
  // r10 is kScratchRegister.
  Push(r11);
  Push(r12);
  // r13 is kRootRegister.
  Push(r14);
  Push(r15);
  STATIC_ASSERT(12 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, -sp_delta));
}
3478
3479
// Restore the registers saved by Pushad, in exact reverse order.
void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, sp_delta));
  Pop(r15);
  Pop(r14);
  Pop(r12);
  Pop(r11);
  Pop(r9);
  Pop(r8);
  Pop(rdi);
  Pop(rsi);
  Pop(rbx);
  Pop(rdx);
  Pop(rcx);
  Pop(rax);
}
3498
3499
// Discard a Pushad frame without restoring any register values.
void MacroAssembler::Dropad() {
  addp(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}
3503
3504
// Order in which general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14, r15.
// (Note: r12 was missing from this list although Pushad saves it.)
// Maps a register code to its slot index within the Pushad layout; -1
// marks registers that are not saved (rsp, rbp, r10 = kScratchRegister,
// r13 = kRootRegister).
const int
MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,
    1,
    2,
    3,
    -1,
    -1,
    4,
    5,
    6,
    7,
    -1,
    8,
    9,
    -1,
    10,
    11
};
3526
3527
// Store an immediate into |dst|'s slot in the safepoint register frame.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
                                                  const Immediate& imm) {
  movp(SafepointRegisterSlot(dst), imm);
}
3532
3533
// Store |src| into |dst|'s slot in the safepoint register frame.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movp(SafepointRegisterSlot(dst), src);
}
3537
3538
// Load |src|'s safepoint-frame slot into |dst|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movp(dst, SafepointRegisterSlot(src));
}
3542
3543
// Stack operand addressing |reg|'s slot within the Pushad frame.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
3547
3548
// Push a new stack handler frame (one word: the link to the previous
// handler) and make it the isolate's innermost handler.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Push(ExternalOperand(handler_address));

  // Set this new handler as the current one.
  movp(ExternalOperand(handler_address), rsp);
}
3561
3562
// Unlink the innermost stack handler: restore the saved "next" link into
// the isolate's handler address and drop the rest of the handler frame.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Pop(ExternalOperand(handler_address));
  addp(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
3569
3570
// Plain return, popping no arguments.
void MacroAssembler::Ret() {
  ret(0);
}
3574
3575
// Return and pop |bytes_dropped| bytes of arguments. The ret imm16 form
// only encodes 16 bits, so larger drops shuffle the return address via
// |scratch| and adjust rsp manually.
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    PopReturnAddressTo(scratch);
    addp(rsp, Immediate(bytes_dropped));
    PushReturnAddressFrom(scratch);
    ret(0);
  }
}
3586
3587
// Compare the top two x87 stack values into EFLAGS and pop both.
void MacroAssembler::FCmp() {
  fucomip();   // compares st(0) with st(1) and pops st(0)
  fstp(0);     // pop the remaining operand
}
3592
3593
// Load |heap_object|'s map into |map| and compare its instance type
// against |type| (flags set for the caller's conditional jump).
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
3600
3601
// Compare the instance-type byte of |map| against |type|.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}
3606
3607
// Jump to |fail| unless |map|'s elements kind is one of the fast kinds
// (smi/object, packed or holey). Relies on the fast kinds occupying the
// low contiguous range of ElementsKind values, per the asserts.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
3619
3620
// Jump to |fail| unless |map|'s elements kind is FAST_ELEMENTS or
// FAST_HOLEY_ELEMENTS (i.e. fast object elements: smi kinds fail low,
// non-fast kinds fail high).
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
3635
3636
// Jump to |fail| unless |map|'s elements kind is FAST_SMI_ELEMENTS or
// FAST_HOLEY_SMI_ELEMENTS.
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
}
3646
3647
// Store |maybe_number| (a smi or heap number; anything else jumps to
// |fail|) as a raw double into slot |index| of the FixedDoubleArray in
// |elements|. Clobbers kScratchRegister and |xmm_scratch|.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register index,
    XMMRegister xmm_scratch,
    Label* fail,
    int elements_offset) {
  Label smi_value, done;

  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, turn potential sNaN into qNaN.
  // (Multiplying by 1.0 quietens any signalling NaN before storing.)
  Move(xmm_scratch, 1.0);
  mulsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  jmp(&done, Label::kNear);

  bind(&smi_value);
  // Value is a smi. convert to a double and store.
  // Preserve original value.
  SmiToInteger32(kScratchRegister, maybe_number);
  Cvtlsi2sd(xmm_scratch, kScratchRegister);
  bind(&done);
  Movsd(FieldOperand(elements, index, times_8,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        xmm_scratch);
}
3679
3680
// Compare |obj|'s map word against |map|, setting flags for the caller.
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
3684
3685
// Jump to |fail| unless |obj|'s map is exactly |map|. With DO_SMI_CHECK,
// smis also fail (a smi has no map word to read).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
3697
3698
// Clamp a 32-bit integer in |reg| to [0, 255] in place.
// Values already in range skip the fixup entirely.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  testl(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  decb(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
3707
3708
// Convert the double in |input_reg| to an integer clamped to [0, 255],
// leaving the result in |result_reg|. Clobbers |temp_xmm_reg|.
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister temp_xmm_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  Xorpd(temp_xmm_reg, temp_xmm_reg);  // temp = +0.0 for the sign test below.
  Cvtsd2si(result_reg, input_reg);
  testl(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);  // Already in [0, 255].
  // cmp with 1 overflows only for INT32_MIN, the cvtsd2si failure value
  // (out-of-range or NaN input).
  cmpl(result_reg, Immediate(1));
  j(overflow, &conv_failure, Label::kNear);
  // In-range conversion but outside [0, 255]: negative -> 0, large -> 255.
  movl(result_reg, Immediate(0));
  setcc(sign, result_reg);  // Uses the sign flag from the cmpl above.
  subl(result_reg, Immediate(1));
  andl(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  // Conversion failed: NaN and negative inputs clamp to 0, positive
  // overflow clamps to 255.
  Set(result_reg, 0);
  Ucomisd(input_reg, temp_xmm_reg);
  j(below, &done, Label::kNear);
  Set(result_reg, 255);
  bind(&done);
}
3732
3733
// Convert the zero-extended uint32 in |src| to a double in |dst|.
// Correctness depends on the upper 32 bits of |src| being clear, which
// debug code asserts.
void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src) {
  if (FLAG_debug_code) {
    cmpq(src, Immediate(0xffffffff));
    Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
  }
  // A 64-bit signed convert is exact for any value that fits in 32
  // unsigned bits.
  Cvtqsi2sd(dst, src);
}
3742
3743
// Call the DoubleToIStub to truncate the double stored at
// [input_reg + offset] into |result_reg| (slow path for values the
// inline cvttsd2si sequence cannot handle).
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
3750
3751
// Truncate the HeapNumber in |input_reg| to a 32-bit integer in
// |result_reg|. Fast path uses cvttsd2siq; the slow (stub) path is taken
// when the conversion yields the integer-indefinite value (detected via
// the cmp-with-1 overflow trick). Clobbers xmm0.
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done;
  Movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
  Cvttsd2siq(result_reg, xmm0);
  // cmp with 1 overflows only for INT64_MIN, the cvttsd2siq failure value.
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  // Slow case.
  if (input_reg.is(result_reg)) {
    // The stub would clobber its own input; spill the double to the stack.
    subp(rsp, Immediate(kDoubleSize));
    Movsd(MemOperand(rsp, 0), xmm0);
    SlowTruncateToI(result_reg, rsp, 0);
    addp(rsp, Immediate(kDoubleSize));
  } else {
    SlowTruncateToI(result_reg, input_reg);
  }

  bind(&done);
  // Keep our invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}
3774
3775
// Truncate the double in |input_reg| to a 32-bit integer in
// |result_reg|; falls back to the DoubleToIStub (via a stack spill) when
// cvttsd2siq produces the integer-indefinite failure value.
void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  Cvttsd2siq(result_reg, input_reg);
  // cmp with 1 overflows only for INT64_MIN, the cvttsd2siq failure value.
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  subp(rsp, Immediate(kDoubleSize));
  Movsd(MemOperand(rsp, 0), input_reg);
  SlowTruncateToI(result_reg, rsp, 0);
  addp(rsp, Immediate(kDoubleSize));

  bind(&done);
  // Keep our invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}
3792
3793
// Convert the double in |input_reg| to an exactly-equal int32 in
// |result_reg|. Branches: |lost_precision| if the round-trip changes
// the value, |is_nan| on NaN, and (with FAIL_ON_MINUS_ZERO) |minus_zero|
// when the input is -0.0. Clobbers xmm0; |scratch| is unused here.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  Cvttsd2si(result_reg, input_reg);
  // Convert back and compare to detect inexact conversions.
  Cvtlsi2sd(xmm0, result_reg);
  Ucomisd(xmm0, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    testl(result_reg, result_reg);
    j(not_zero, &done, Label::kNear);
    Movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to minus_zero.
    andl(result_reg, Immediate(1));
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}
3819
3820
// Load |map|'s descriptor array into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movp(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
3825
3826
// Extract the number-of-own-descriptors field of |map|'s bit field 3
// into |dst|.
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
3831
3832
// Extract |map|'s enum-cache length from bit field 3 into |dst| as a smi.
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  andl(dst, Immediate(Map::EnumLengthBits::kMask));
  Integer32ToSmi(dst, dst);
}
3839
3840
// Load the getter or setter at |accessor_index| of |holder|'s descriptor
// array into |dst| (descriptors -> AccessorPair -> component).
void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  movp(dst, FieldOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  movp(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  movp(dst, FieldOperand(dst, offset));
}
3851
3852
// Tail-jump to |success| if |obj|'s map equals the map held (weakly) in
// |cell|; otherwise fall through. Smis fall through too under
// DO_SMI_CHECK. Clobbers |scratch1| and |scratch2|.
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  movq(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success, RelocInfo::CODE_TARGET);
  bind(&fail);
}
3866
3867
// Debug-only check that |object| is a smi or a heap number.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    Condition is_smi = CheckSmi(object);
    j(is_smi, &ok, Label::kNear);
    Cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandIsNotANumber);
    bind(&ok);
  }
}
3879
// Debug-only check that |object| is neither a smi nor a heap number.
void MacroAssembler::AssertNotNumber(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(NegateCondition(is_smi), kOperandIsANumber);
    Cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(not_equal, kOperandIsANumber);
  }
}
Andrei Popescu402d9372010-02-26 13:31:12 +00003889
// Debug-only check that |object| is not a smi.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(NegateCondition(is_smi), kOperandIsASmi);
  }
}
3896
3897
// Debug-only check that |object| is a smi.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, kOperandIsNotASmi);
  }
}
3904
3905
// Debug-only check that the value at |object| is a smi.
void MacroAssembler::AssertSmi(const Operand& object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, kOperandIsNotASmi);
  }
}
3912
3913
// Debug-only check that the upper 32 bits of |int32_register| are zero.
void MacroAssembler::AssertZeroExtended(Register int32_register) {
  if (emit_debug_code()) {
    DCHECK(!int32_register.is(kScratchRegister));
    movq(kScratchRegister, V8_INT64_C(0x0000000100000000));
    cmpq(kScratchRegister, int32_register);
    Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
  }
}
3922
3923
// Debug-only check that |object| is a string (not a smi, instance type
// below FIRST_NONSTRING_TYPE). Preserves |object| via push/pop.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    Pop(object);
    Check(below, kOperandIsNotAString);
  }
}
3935
3936
// Debug-only check that |object| is a name (string or symbol).
// Preserves |object| via push/pop.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    Pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
3948
3949
// Debug-only check that |object| is a JSFunction. Preserves |object|.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}
3960
3961
// Debug-only check that |object| is a JSBoundFunction. Preserves |object|.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}
3972
3973
// Debug-only check that |object| is a JSReceiver (instance type at or
// above FIRST_JS_RECEIVER_TYPE). Preserves |object|.
void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAReceiver);
    Push(object);
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, object);
    Pop(object);
    Check(above_equal, kOperandIsNotAReceiver);
  }
}
3985
3986
// Debug-only check that |object| is either the undefined value or an
// AllocationSite (identified by its map word).
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    Cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
3998
3999
// Debug-only check that |src| holds exactly the root value at
// |root_value_index|; reports |reason| on mismatch. Clobbers
// kScratchRegister (hence the DCHECK that src differs from it).
void MacroAssembler::AssertRootValue(Register src,
                                     Heap::RootListIndex root_value_index,
                                     BailoutReason reason) {
  if (emit_debug_code()) {
    DCHECK(!src.is(kScratchRegister));
    LoadRoot(kScratchRegister, root_value_index);
    cmpp(src, kScratchRegister);
    Check(equal, reason);
  }
}
4010
4011
4012
// Load |heap_object|'s map and instance type, test the string mask, and
// return the condition (zero) that holds when the object is a string.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
4022
4023
// Load |heap_object|'s map and instance type, compare against
// LAST_NAME_TYPE, and return the condition (below_equal) that holds
// when the object is a name.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, Immediate(static_cast<uint8_t>(LAST_NAME_TYPE)));
  return below_equal;
}
4032
4033
// Walk |map|'s constructor-or-back-pointer chain until reaching a value
// that is not a map (the actual constructor, or a smi), leaving it in
// |result|. Clobbers |temp|.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  movp(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  movp(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004046
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004047
// Load |function|'s prototype into |result|, dereferencing the initial
// map when present; jumps to |miss| if the slot holds the hole.
// Clobbers kScratchRegister.
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Label* miss) {
  // Get the prototype or initial map from the function.
  movp(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  movp(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
4071
4072
// Stores |value| into the given stats counter. Emits no code at all
// unless native code counters are enabled and the counter is active.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    // Counters are 32-bit values.
    movl(counter_operand, Immediate(value));
  }
}
4079
4080
// Adds |value| (must be positive) to the given stats counter. Emits no
// code unless native code counters are enabled and the counter is active.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      // A single increment gets the shorter incl encoding.
      incl(counter_operand);
    } else {
      addl(counter_operand, Immediate(value));
    }
  }
}
4092
4093
// Subtracts |value| (must be positive) from the given stats counter.
// Emits no code unless native code counters are enabled and the counter
// is active.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      // A single decrement gets the shorter decl encoding.
      decl(counter_operand);
    } else {
      subl(counter_operand, Immediate(value));
    }
  }
}
4105
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004106
// Emits a call into the runtime (via CEntryStub) that notifies the
// debugger of a debugger statement. The runtime function takes no
// arguments.
void MacroAssembler::DebugBreak() {
  Set(rax, 0);  // No arguments.
  // rbx holds the C function to call; rax holds the argument count.
  LoadAddress(rbx,
              ExternalReference(Runtime::kHandleDebuggerStatement, isolate()));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
Ben Murdoch257744e2011-11-30 15:57:28 +00004115
// Drops the current frame so a tail call can reuse the caller's frame.
// |caller_args_count_reg| holds the caller's actual argument count on
// entry and is reused as the copy counter (clobbered).
// |callee_args_count| describes the callee's argument count (register or
// immediate). The callee's arguments plus receiver and return address are
// copied up over the caller's arguments, rbp is restored to the caller's
// caller, and rsp is left pointing at the relocated return address.
// |scratch0| and |scratch1| are clobbered. |ra_state| says whether the
// return address is already on the stack or must first be pushed.
void MacroAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
                                        Register caller_args_count_reg,
                                        Register scratch0, Register scratch1,
                                        ReturnAddressState ra_state) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
#endif

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch0;
  if (callee_args_count.is_reg()) {
    subp(caller_args_count_reg, callee_args_count.reg());
    leap(new_sp_reg, Operand(rbp, caller_args_count_reg, times_pointer_size,
                             StandardFrameConstants::kCallerPCOffset));
  } else {
    leap(new_sp_reg, Operand(rbp, caller_args_count_reg, times_pointer_size,
                             StandardFrameConstants::kCallerPCOffset -
                                 callee_args_count.immediate() * kPointerSize));
  }

  if (FLAG_debug_code) {
    cmpp(rsp, new_sp_reg);
    Check(below, kStackAccessBelowStackPointer);
  }

  // Copy return address from caller's frame to current frame's return address
  // to avoid its trashing and let the following loop copy it to the right
  // place.
  Register tmp_reg = scratch1;
  if (ra_state == ReturnAddressState::kOnStack) {
    movp(tmp_reg, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
    movp(Operand(rsp, 0), tmp_reg);
  } else {
    DCHECK(ReturnAddressState::kNotOnStack == ra_state);
    Push(Operand(rbp, StandardFrameConstants::kCallerPCOffset));
  }

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // +2 here is to copy both receiver and return address.
  Register count_reg = caller_args_count_reg;
  if (callee_args_count.is_reg()) {
    leap(count_reg, Operand(callee_args_count.reg(), 2));
  } else {
    movp(count_reg, Immediate(callee_args_count.immediate() + 2));
    // TODO(ishell): Unroll copying loop for small immediate values.
  }

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  decp(count_reg);
  movp(tmp_reg, Operand(rsp, count_reg, times_pointer_size, 0));
  movp(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
  bind(&entry);
  cmpp(count_reg, Immediate(0));
  j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  movp(rsp, new_sp_reg);
}
Ben Murdoch257744e2011-11-30 15:57:28 +00004187
// Invokes |function| with the given actual argument count, deriving the
// expected count from the function's SharedFunctionInfo. Loads the formal
// parameter count into rbx (clobbered) and delegates to the full
// InvokeFunction overload.
void MacroAssembler::InvokeFunction(Register function,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  movp(rbx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  LoadSharedFunctionInfoSpecialField(
      rbx, rbx, SharedFunctionInfo::kFormalParameterCountOffset);

  ParameterCount expected(rbx);
  InvokeFunction(function, new_target, expected, actual, flag, call_wrapper);
}
4200
4201
// Invokes a statically-known JSFunction handle. Materializes the function
// in rdi (clobbered, as required by the register overload) and invokes it
// with no new.target.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(rdi, function);
  InvokeFunction(rdi, no_reg, expected, actual, flag, call_wrapper);
}
4210
4211
// Invokes |function| (which must be in rdi) with explicit expected and
// actual argument counts. Loads the function's context into rsi before
// dispatching to the code entry.
void MacroAssembler::InvokeFunction(Register function,
                                    Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  DCHECK(function.is(rdi));
  movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
  InvokeFunctionCode(rdi, new_target, expected, actual, flag, call_wrapper);
}
4222
4223
// Calls or jumps (per |flag|) to the code entry of |function| (must be in
// rdi; new.target, if valid, must be in rdx). Handles the debugger
// step-in flood check, clears new.target when absent, and runs the
// argument-count adaptation prologue before transferring control.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(rdi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(rdx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected,
                 actual,
                 &done,
                 &definitely_mismatches,
                 flag,
                 Label::kNear,
                 call_wrapper);
  // On a definite mismatch the prologue already jumped to the arguments
  // adaptor, so the direct call below would be unreachable.
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
4268
4269
// Shared invocation prologue: reconciles the expected and actual argument
// counts. Puts the actual count in rax (and, on a possible mismatch, the
// expected count in rbx as the ArgumentsAdaptorTrampoline's inputs). When
// the counts can differ at runtime, emits a call/jump to the adaptor;
// sets *definitely_mismatches when the mismatch is known at compile time
// (in which case control never reaches |done|). Otherwise falls through
// or jumps to |done| for the direct invocation path.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance near_jump,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    Set(rax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      Set(rax, actual.immediate());
      cmpp(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      DCHECK(expected.reg().is(rbx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpp(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      DCHECK(actual.reg().is(rax));
      DCHECK(expected.reg().is(rbx));
    } else {
      // Expected and actual are in the same register: counts trivially
      // match; just make sure the actual count ends up in rax.
      Move(rax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        // The adaptor performed the invocation; skip the direct call.
        jmp(done, near_jump);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
4334
4335
// When the debugger's step-in flag is set, calls the runtime to flood
// |fun| with debug break slots before it is invoked. All live invocation
// registers (expected/actual counts, new.target, the function itself) are
// saved around the runtime call; register counts are smi-tagged while on
// the stack.
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(isolate());
  Operand step_in_enabled_operand = ExternalOperand(step_in_enabled);
  cmpb(step_in_enabled_operand, Immediate(0));
  j(equal, &skip_flooding);
  {
    // Enter an internal frame only if we are not already inside one.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      Integer32ToSmi(expected.reg(), expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      Integer32ToSmi(actual.reg(), actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // Push |fun| twice: once to preserve it across the call and once as
    // the argument to the runtime function (which consumes it).
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiToInteger64(actual.reg(), actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiToInteger64(expected.reg(), expected.reg());
    }
  }
  bind(&skip_flooding);
}
4377
// Emits the prologue for a stub frame: saves the caller's frame pointer,
// establishes the new frame, and pushes the smi-tagged frame type marker.
void MacroAssembler::StubPrologue(StackFrame::Type type) {
  pushq(rbp);  // Caller's frame pointer.
  movp(rbp, rsp);
  Push(Smi::FromInt(type));
}
4383
// Emits the standard JS function prologue. When |code_pre_aging| is set,
// emits the pre-aged code-age sequence (a call to the "mark executed"
// builtin, padded to the fixed code-age sequence length) instead of the
// frame setup; the two variants must occupy the same number of bytes so
// the code-age machinery can patch between them.
void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code.
    Call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
  } else {
    pushq(rbp);  // Caller's frame pointer.
    movp(rbp, rsp);
    Push(rsi);  // Callee's context.
    Push(rdi);  // Callee's JS function.
  }
}
4399
4400
// Loads the current function's type feedback vector into |vector| by
// chasing: frame function slot -> SharedFunctionInfo -> feedback vector.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  movp(vector, FieldOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}
4406
4407
// Overload taking a constant-pool flag; x64 has no out-of-line constant
// pool, so this must never be called on this architecture.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on x64.
  UNREACHABLE();
}
4413
4414
// Builds a typed stack frame: saves rbp, establishes the new frame, and
// pushes the smi-tagged type marker. INTERNAL frames additionally push
// the code object. In debug mode, verifies that the pushed code-object
// slot is not the undefined placeholder (i.e. it was properly patched).
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  pushq(rbp);
  movp(rbp, rsp);
  Push(Smi::FromInt(type));
  if (type == StackFrame::INTERNAL) {
    Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
    Push(kScratchRegister);
  }
  if (emit_debug_code()) {
    Move(kScratchRegister,
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpp(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
4431
4432
// Tears down a frame built by EnterFrame. In debug mode, first checks
// that the frame-type marker on the stack matches |type|.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
         kScratchRegister);
    Check(equal, kStackFrameTypesMustMatch);
  }
  // Discard everything in the frame and restore the caller's rbp.
  movp(rsp, rbp);
  popq(rbp);
}
4443
4444
// First half of exit-frame construction: builds the fixed part of an
// EXIT frame (frame pointer, type marker, saved-sp slot, code object) and
// records rbp/rsi/rbx in the isolate's top-of-stack external references.
// When |save_rax| is set, rax is preserved in callee-saved r14 (used
// later by EnterExitFrame to compute argv).
void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  DCHECK_EQ(kFPOnStackSize + kPCOnStackSize,
            ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(kFPOnStackSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  pushq(rbp);
  movp(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  Push(Smi::FromInt(StackFrame::EXIT));
  DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
  Push(Immediate(0));  // Saved entry sp, patched before call.
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  Push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movp(r14, rax);  // Backup rax in callee-save register.
  }

  Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
  Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
  Store(ExternalReference(Isolate::kCFunctionAddress, isolate()), rbx);
}
Steve Blocka7e24c12009-10-30 11:49:00 +00004471
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004472
// Second half of exit-frame construction: reserves stack space for the
// C call arguments (plus Win64 shadow space), optionally spills all
// allocatable XMM registers below the fixed frame, aligns rsp to the
// OS's activation frame alignment, and patches the saved entry sp slot.
void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  // The Windows x64 ABI requires the caller to reserve four register-sized
  // "shadow" slots for the callee.
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
                arg_stack_space * kRegisterSize;
    subp(rsp, Immediate(space));
    // XMM saves start just below the fixed part of the exit frame.
    int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    const RegisterConfiguration* config =
        RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      Movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subp(rsp, Immediate(arg_stack_space * kRegisterSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    DCHECK(is_int8(kFrameAlignment));
    andp(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
4507
4508
// Builds a full EXIT frame for calling out to C code from JS. Expects the
// argument count in rax (backed up into r14 by the prologue); leaves argv
// in callee-saved r15 for use by LeaveExitFrame.
void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  leap(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}
4519
4520
// Builds an exit frame for an API callback: no rax backup and no XMM
// register saves, just the fixed frame plus |arg_stack_space| slots.
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}
4525
4526
// Tears down an EXIT frame built by EnterExitFrame. Optionally restores
// the XMM registers spilled by EnterExitFrameEpilogue. When
// |pop_arguments| is set, also drops the JS arguments and receiver from
// the caller's stack (using argv in r15); otherwise only the frame itself
// is unwound. Clobbers rcx.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    // Reload the XMM registers from the same slots the epilogue used.
    int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    const RegisterConfiguration* config =
        RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      Movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    movp(rcx, Operand(rbp, kFPOnStackSize));
    movp(rbp, Operand(rbp, 0 * kPointerSize));

    // Drop everything up to and including the arguments and the receiver
    // from the caller stack.
    leap(rsp, Operand(r15, 1 * kPointerSize));

    PushReturnAddressFrom(rcx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
4558
4559
// Tears down an API exit frame built by EnterApiExitFrame. When
// |restore_context| is set, rsi is reloaded from the isolate's saved
// context.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  movp(rsp, rbp);
  popq(rbp);

  LeaveExitFrameEpilogue(restore_context);
}
4566
4567
// Shared exit-frame teardown tail: optionally restores rsi from the
// isolate's saved context (clearing the slot in debug builds) and clears
// the isolate's c_entry_fp, marking that we are no longer in an exit
// frame.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  Operand context_operand = ExternalOperand(context_address);
  if (restore_context) {
    movp(rsi, context_operand);
  }
#ifdef DEBUG
  movp(context_operand, Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movp(c_entry_fp_operand, Immediate(0));
}
4585
4586
// Security check for accessing a JSGlobalProxy in |holder_reg|: walks the
// frame chain to find the current lexical context, then verifies either
// that its native context is the same as the holder's, or that the two
// native contexts carry the same security token. Jumps to |miss| on
// failure. Clobbers |scratch| and kScratchRegister; in debug mode
// |holder_reg| is saved/restored around extra sanity checks.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!scratch.is(kScratchRegister));
  // Load current lexical context from the active StandardFrame, which
  // may require crawling past STUB frames.
  Label load_context;
  Label has_context;
  movp(scratch, rbp);
  bind(&load_context);
  DCHECK(SmiValuesAre32Bits());
  // This is "JumpIfNotSmi" but without loading the value into a register.
  cmpl(MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset),
       Immediate(0));
  j(not_equal, &has_context);
  // Slot held a smi frame-type marker, not a context: step to caller frame.
  movp(scratch, MemOperand(scratch, CommonFrameConstants::kCallerFPOffset));
  jmp(&load_context);
  bind(&has_context);
  movp(scratch,
       MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmpp(scratch, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  movp(scratch, ContextOperand(scratch, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmpp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Preserve original value of holder_reg.
    Push(holder_reg);
    movp(holder_reg,
         FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    movp(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
    Pop(holder_reg);
  }

  // Load the holder's native context and compare the security tokens.
  movp(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movp(scratch, FieldOperand(scratch, token_offset));
  cmpp(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
4661
4662
// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
//
// On entry |r0| holds the untagged integer key; on exit it holds the
// hash. |scratch| is clobbered.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiToInteger32(scratch, scratch);

  // Xor original key with a seed.
  xorl(r0, scratch);

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  movl(scratch, r0);
  notl(r0);
  shll(scratch, Immediate(15));
  addl(r0, scratch);
  // hash = hash ^ (hash >> 12);
  movl(scratch, r0);
  shrl(scratch, Immediate(12));
  xorl(r0, scratch);
  // hash = hash + (hash << 2);
  leal(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  movl(scratch, r0);
  shrl(scratch, Immediate(4));
  xorl(r0, scratch);
  // hash = hash * 2057;
  imull(r0, r0, Immediate(2057));
  // hash = hash ^ (hash >> 16);
  movl(scratch, r0);
  shrl(scratch, Immediate(16));
  xorl(r0, scratch);
  // Keep only the low 30 bits so the hash fits in a non-negative smi.
  andl(r0, Immediate(0x3fffffff));
}
4700
4701
4702
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004703void MacroAssembler::LoadFromNumberDictionary(Label* miss,
4704 Register elements,
4705 Register key,
4706 Register r0,
4707 Register r1,
4708 Register r2,
4709 Register result) {
4710 // Register use:
4711 //
4712 // elements - holds the slow-case elements of the receiver on entry.
4713 // Unchanged unless 'result' is the same register.
4714 //
4715 // key - holds the smi key on entry.
4716 // Unchanged unless 'result' is the same register.
4717 //
4718 // Scratch registers:
4719 //
4720 // r0 - holds the untagged key on entry and holds the hash once computed.
4721 //
4722 // r1 - used to hold the capacity mask of the dictionary
4723 //
4724 // r2 - used for the index into the dictionary.
4725 //
4726 // result - holds the result on exit if the load succeeded.
4727 // Allowed to be the same as 'key' or 'result'.
4728 // Unchanged on bailout so 'key' or 'result' can be used
4729 // in further computation.
4730
4731 Label done;
4732
Ben Murdochc7cc0282012-03-05 14:35:55 +00004733 GetNumberHash(r0, r1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004734
4735 // Compute capacity mask.
Ben Murdochc7cc0282012-03-05 14:35:55 +00004736 SmiToInteger32(r1, FieldOperand(elements,
4737 SeededNumberDictionary::kCapacityOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004738 decl(r1);
4739
4740 // Generate an unrolled loop that performs a few probes before giving up.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004741 for (int i = 0; i < kNumberDictionaryProbes; i++) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004742 // Use r2 for index calculations and keep the hash intact in r0.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004743 movp(r2, r0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004744 // Compute the masked index: (hash + i + i * i) & mask.
4745 if (i > 0) {
Ben Murdochc7cc0282012-03-05 14:35:55 +00004746 addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004747 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004748 andp(r2, r1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004749
4750 // Scale the index by multiplying by the entry size.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004751 DCHECK(SeededNumberDictionary::kEntrySize == 3);
4752 leap(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004753
4754 // Check if the key matches.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004755 cmpp(key, FieldOperand(elements,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004756 r2,
4757 times_pointer_size,
Ben Murdochc7cc0282012-03-05 14:35:55 +00004758 SeededNumberDictionary::kElementsStartOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004759 if (i != (kNumberDictionaryProbes - 1)) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004760 j(equal, &done);
4761 } else {
4762 j(not_equal, miss);
4763 }
4764 }
4765
4766 bind(&done);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004767 // Check that the value is a field property.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004768 const int kDetailsOffset =
Ben Murdochc7cc0282012-03-05 14:35:55 +00004769 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004770 DCHECK_EQ(DATA, 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004771 Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
Ben Murdoch589d6972011-11-30 16:04:58 +00004772 Smi::FromInt(PropertyDetails::TypeField::kMask));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004773 j(not_zero, miss);
4774
4775 // Get the value at the masked, scaled index.
4776 const int kValueOffset =
Ben Murdochc7cc0282012-03-05 14:35:55 +00004777 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004778 movp(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004779}
4780
4781
Steve Blocka7e24c12009-10-30 11:49:00 +00004782void MacroAssembler::LoadAllocationTopHelper(Register result,
Steve Blocka7e24c12009-10-30 11:49:00 +00004783 Register scratch,
4784 AllocationFlags flags) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004785 ExternalReference allocation_top =
4786 AllocationUtils::GetAllocationTopReference(isolate(), flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004787
4788 // Just return if allocation top is already known.
4789 if ((flags & RESULT_CONTAINS_TOP) != 0) {
4790 // No use of scratch if allocation top is provided.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004791 DCHECK(!scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00004792#ifdef DEBUG
4793 // Assert that result actually contains top on entry.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004794 Operand top_operand = ExternalOperand(allocation_top);
4795 cmpp(result, top_operand);
4796 Check(equal, kUnexpectedAllocationTop);
Steve Blocka7e24c12009-10-30 11:49:00 +00004797#endif
4798 return;
4799 }
4800
Steve Block6ded16b2010-05-10 14:33:55 +01004801 // Move address of new object to result. Use scratch register if available,
4802 // and keep address in scratch until call to UpdateAllocationTopHelper.
4803 if (scratch.is_valid()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004804 LoadAddress(scratch, allocation_top);
4805 movp(result, Operand(scratch, 0));
Steve Block6ded16b2010-05-10 14:33:55 +01004806 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004807 Load(result, allocation_top);
4808 }
4809}
4810
4811
4812void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
4813 Register scratch,
4814 Label* gc_required,
4815 AllocationFlags flags) {
4816 if (kPointerSize == kDoubleSize) {
4817 if (FLAG_debug_code) {
4818 testl(result, Immediate(kDoubleAlignmentMask));
4819 Check(zero, kAllocationIsNotDoubleAligned);
4820 }
4821 } else {
4822 // Align the next allocation. Storing the filler map without checking top
4823 // is safe in new-space because the limit of the heap is aligned there.
4824 DCHECK(kPointerSize * 2 == kDoubleSize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004825 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
4826 // Make sure scratch is not clobbered by this function as it might be
4827 // used in UpdateAllocationTopHelper later.
4828 DCHECK(!scratch.is(kScratchRegister));
4829 Label aligned;
4830 testl(result, Immediate(kDoubleAlignmentMask));
4831 j(zero, &aligned, Label::kNear);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004832 if ((flags & PRETENURE) != 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004833 ExternalReference allocation_limit =
4834 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
4835 cmpp(result, ExternalOperand(allocation_limit));
4836 j(above_equal, gc_required);
4837 }
4838 LoadRoot(kScratchRegister, Heap::kOnePointerFillerMapRootIndex);
4839 movp(Operand(result, 0), kScratchRegister);
4840 addp(result, Immediate(kDoubleSize / 2));
4841 bind(&aligned);
Steve Blocka7e24c12009-10-30 11:49:00 +00004842 }
4843}
4844
4845
4846void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004847 Register scratch,
4848 AllocationFlags flags) {
Steve Block44f0eee2011-05-26 01:26:41 +01004849 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004850 testp(result_end, Immediate(kObjectAlignmentMask));
4851 Check(zero, kUnalignedAllocationInNewSpace);
Steve Blockd0582a62009-12-15 09:54:21 +00004852 }
4853
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004854 ExternalReference allocation_top =
4855 AllocationUtils::GetAllocationTopReference(isolate(), flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004856
4857 // Update new top.
Steve Block44f0eee2011-05-26 01:26:41 +01004858 if (scratch.is_valid()) {
4859 // Scratch already contains address of allocation top.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004860 movp(Operand(scratch, 0), result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00004861 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004862 Store(allocation_top, result_end);
Steve Blocka7e24c12009-10-30 11:49:00 +00004863 }
4864}
4865
4866
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004867void MacroAssembler::Allocate(int object_size,
4868 Register result,
4869 Register result_end,
4870 Register scratch,
4871 Label* gc_required,
4872 AllocationFlags flags) {
4873 DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
4874 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
John Reck59135872010-11-02 12:39:01 -07004875 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01004876 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07004877 // Trash the registers to simulate an allocation failure.
4878 movl(result, Immediate(0x7091));
4879 if (result_end.is_valid()) {
4880 movl(result_end, Immediate(0x7191));
4881 }
4882 if (scratch.is_valid()) {
4883 movl(scratch, Immediate(0x7291));
4884 }
4885 }
4886 jmp(gc_required);
4887 return;
4888 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004889 DCHECK(!result.is(result_end));
Steve Blocka7e24c12009-10-30 11:49:00 +00004890
4891 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004892 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004893
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004894 if ((flags & DOUBLE_ALIGNMENT) != 0) {
4895 MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
4896 }
4897
Steve Blocka7e24c12009-10-30 11:49:00 +00004898 // Calculate new top and bail out if new space is exhausted.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004899 ExternalReference allocation_limit =
4900 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
Steve Block6ded16b2010-05-10 14:33:55 +01004901
4902 Register top_reg = result_end.is_valid() ? result_end : result;
4903
Steve Block1e0659c2011-05-24 12:43:12 +01004904 if (!top_reg.is(result)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004905 movp(top_reg, result);
Steve Block6ded16b2010-05-10 14:33:55 +01004906 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004907 addp(top_reg, Immediate(object_size));
Steve Block1e0659c2011-05-24 12:43:12 +01004908 j(carry, gc_required);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004909 Operand limit_operand = ExternalOperand(allocation_limit);
4910 cmpp(top_reg, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00004911 j(above, gc_required);
4912
4913 // Update allocation top.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004914 UpdateAllocationTopHelper(top_reg, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004915
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004916 bool tag_result = (flags & TAG_OBJECT) != 0;
Steve Block6ded16b2010-05-10 14:33:55 +01004917 if (top_reg.is(result)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004918 if (tag_result) {
4919 subp(result, Immediate(object_size - kHeapObjectTag));
Steve Block6ded16b2010-05-10 14:33:55 +01004920 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004921 subp(result, Immediate(object_size));
Steve Block6ded16b2010-05-10 14:33:55 +01004922 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004923 } else if (tag_result) {
Steve Block6ded16b2010-05-10 14:33:55 +01004924 // Tag the result if requested.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004925 DCHECK(kHeapObjectTag == 1);
4926 incp(result);
Steve Blocka7e24c12009-10-30 11:49:00 +00004927 }
4928}
4929
4930
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004931void MacroAssembler::Allocate(int header_size,
4932 ScaleFactor element_size,
4933 Register element_count,
4934 Register result,
4935 Register result_end,
4936 Register scratch,
4937 Label* gc_required,
4938 AllocationFlags flags) {
4939 DCHECK((flags & SIZE_IN_WORDS) == 0);
4940 leap(result_end, Operand(element_count, element_size, header_size));
4941 Allocate(result_end, result, result_end, scratch, gc_required, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004942}
4943
4944
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004945void MacroAssembler::Allocate(Register object_size,
4946 Register result,
4947 Register result_end,
4948 Register scratch,
4949 Label* gc_required,
4950 AllocationFlags flags) {
4951 DCHECK((flags & SIZE_IN_WORDS) == 0);
John Reck59135872010-11-02 12:39:01 -07004952 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01004953 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07004954 // Trash the registers to simulate an allocation failure.
4955 movl(result, Immediate(0x7091));
4956 movl(result_end, Immediate(0x7191));
4957 if (scratch.is_valid()) {
4958 movl(scratch, Immediate(0x7291));
4959 }
4960 // object_size is left unchanged by this function.
4961 }
4962 jmp(gc_required);
4963 return;
4964 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004965 DCHECK(!result.is(result_end));
John Reck59135872010-11-02 12:39:01 -07004966
Steve Blocka7e24c12009-10-30 11:49:00 +00004967 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004968 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004969
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004970 if ((flags & DOUBLE_ALIGNMENT) != 0) {
4971 MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004972 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004973
4974 // Calculate new top and bail out if new space is exhausted.
4975 ExternalReference allocation_limit =
4976 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
4977 if (!object_size.is(result_end)) {
4978 movp(result_end, object_size);
4979 }
4980 addp(result_end, result);
Steve Block1e0659c2011-05-24 12:43:12 +01004981 j(carry, gc_required);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004982 Operand limit_operand = ExternalOperand(allocation_limit);
4983 cmpp(result_end, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00004984 j(above, gc_required);
4985
4986 // Update allocation top.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004987 UpdateAllocationTopHelper(result_end, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004988
4989 // Tag the result if requested.
4990 if ((flags & TAG_OBJECT) != 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004991 addp(result, Immediate(kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +00004992 }
4993}
4994
4995
Steve Block3ce2e202009-11-05 08:53:23 +00004996void MacroAssembler::AllocateHeapNumber(Register result,
4997 Register scratch,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004998 Label* gc_required,
4999 MutableMode mode) {
Steve Block3ce2e202009-11-05 08:53:23 +00005000 // Allocate heap number in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005001 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);
5002
5003 Heap::RootListIndex map_index = mode == MUTABLE
5004 ? Heap::kMutableHeapNumberMapRootIndex
5005 : Heap::kHeapNumberMapRootIndex;
Steve Block3ce2e202009-11-05 08:53:23 +00005006
5007 // Set the map.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005008 LoadRoot(kScratchRegister, map_index);
5009 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block3ce2e202009-11-05 08:53:23 +00005010}
5011
5012
Leon Clarkee46be812010-01-19 14:06:41 +00005013void MacroAssembler::AllocateTwoByteString(Register result,
5014 Register length,
5015 Register scratch1,
5016 Register scratch2,
5017 Register scratch3,
5018 Label* gc_required) {
5019 // Calculate the number of bytes needed for the characters in the string while
5020 // observing object alignment.
Steve Block6ded16b2010-05-10 14:33:55 +01005021 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
5022 kObjectAlignmentMask;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005023 DCHECK(kShortSize == 2);
Leon Clarkee46be812010-01-19 14:06:41 +00005024 // scratch1 = length * 2 + kObjectAlignmentMask.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005025 leap(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
Steve Block6ded16b2010-05-10 14:33:55 +01005026 kHeaderAlignment));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005027 andp(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01005028 if (kHeaderAlignment > 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005029 subp(scratch1, Immediate(kHeaderAlignment));
Steve Block6ded16b2010-05-10 14:33:55 +01005030 }
Leon Clarkee46be812010-01-19 14:06:41 +00005031
5032 // Allocate two byte string in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005033 Allocate(SeqTwoByteString::kHeaderSize,
5034 times_1,
5035 scratch1,
5036 result,
5037 scratch2,
5038 scratch3,
5039 gc_required,
5040 TAG_OBJECT);
Leon Clarkee46be812010-01-19 14:06:41 +00005041
5042 // Set the map, length and hash field.
5043 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005044 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01005045 Integer32ToSmi(scratch1, length);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005046 movp(FieldOperand(result, String::kLengthOffset), scratch1);
5047 movp(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00005048 Immediate(String::kEmptyHashField));
5049}
5050
5051
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005052void MacroAssembler::AllocateOneByteString(Register result, Register length,
5053 Register scratch1, Register scratch2,
5054 Register scratch3,
5055 Label* gc_required) {
Leon Clarkee46be812010-01-19 14:06:41 +00005056 // Calculate the number of bytes needed for the characters in the string while
5057 // observing object alignment.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005058 const int kHeaderAlignment = SeqOneByteString::kHeaderSize &
Steve Block6ded16b2010-05-10 14:33:55 +01005059 kObjectAlignmentMask;
Leon Clarkee46be812010-01-19 14:06:41 +00005060 movl(scratch1, length);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005061 DCHECK(kCharSize == 1);
5062 addp(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
5063 andp(scratch1, Immediate(~kObjectAlignmentMask));
Steve Block6ded16b2010-05-10 14:33:55 +01005064 if (kHeaderAlignment > 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005065 subp(scratch1, Immediate(kHeaderAlignment));
Steve Block6ded16b2010-05-10 14:33:55 +01005066 }
Leon Clarkee46be812010-01-19 14:06:41 +00005067
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005068 // Allocate one-byte string in new space.
5069 Allocate(SeqOneByteString::kHeaderSize,
5070 times_1,
5071 scratch1,
5072 result,
5073 scratch2,
5074 scratch3,
5075 gc_required,
5076 TAG_OBJECT);
Leon Clarkee46be812010-01-19 14:06:41 +00005077
5078 // Set the map, length and hash field.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005079 LoadRoot(kScratchRegister, Heap::kOneByteStringMapRootIndex);
5080 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block6ded16b2010-05-10 14:33:55 +01005081 Integer32ToSmi(scratch1, length);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005082 movp(FieldOperand(result, String::kLengthOffset), scratch1);
5083 movp(FieldOperand(result, String::kHashFieldOffset),
Leon Clarkee46be812010-01-19 14:06:41 +00005084 Immediate(String::kEmptyHashField));
5085}
5086
5087
Ben Murdoch589d6972011-11-30 16:04:58 +00005088void MacroAssembler::AllocateTwoByteConsString(Register result,
Leon Clarkee46be812010-01-19 14:06:41 +00005089 Register scratch1,
5090 Register scratch2,
5091 Label* gc_required) {
5092 // Allocate heap number in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005093 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
5094 TAG_OBJECT);
Leon Clarkee46be812010-01-19 14:06:41 +00005095
5096 // Set the map. The other fields are left uninitialized.
5097 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005098 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Leon Clarkee46be812010-01-19 14:06:41 +00005099}
5100
5101
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005102void MacroAssembler::AllocateOneByteConsString(Register result,
5103 Register scratch1,
5104 Register scratch2,
5105 Label* gc_required) {
5106 Allocate(ConsString::kSize,
5107 result,
5108 scratch1,
5109 scratch2,
5110 gc_required,
5111 TAG_OBJECT);
Leon Clarkee46be812010-01-19 14:06:41 +00005112
5113 // Set the map. The other fields are left uninitialized.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005114 LoadRoot(kScratchRegister, Heap::kConsOneByteStringMapRootIndex);
5115 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Leon Clarkee46be812010-01-19 14:06:41 +00005116}
5117
5118
Ben Murdoch589d6972011-11-30 16:04:58 +00005119void MacroAssembler::AllocateTwoByteSlicedString(Register result,
5120 Register scratch1,
5121 Register scratch2,
5122 Label* gc_required) {
5123 // Allocate heap number in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005124 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
5125 TAG_OBJECT);
Ben Murdoch589d6972011-11-30 16:04:58 +00005126
5127 // Set the map. The other fields are left uninitialized.
5128 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005129 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Ben Murdoch589d6972011-11-30 16:04:58 +00005130}
5131
5132
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005133void MacroAssembler::AllocateOneByteSlicedString(Register result,
5134 Register scratch1,
5135 Register scratch2,
5136 Label* gc_required) {
Ben Murdoch589d6972011-11-30 16:04:58 +00005137 // Allocate heap number in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005138 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
5139 TAG_OBJECT);
Ben Murdoch589d6972011-11-30 16:04:58 +00005140
5141 // Set the map. The other fields are left uninitialized.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005142 LoadRoot(kScratchRegister, Heap::kSlicedOneByteStringMapRootIndex);
5143 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Ben Murdoch589d6972011-11-30 16:04:58 +00005144}
5145
5146
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005147void MacroAssembler::AllocateJSValue(Register result, Register constructor,
5148 Register value, Register scratch,
5149 Label* gc_required) {
5150 DCHECK(!result.is(constructor));
5151 DCHECK(!result.is(scratch));
5152 DCHECK(!result.is(value));
5153
5154 // Allocate JSValue in new space.
5155 Allocate(JSValue::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);
5156
5157 // Initialize the JSValue.
5158 LoadGlobalFunctionInitialMap(constructor, scratch);
5159 movp(FieldOperand(result, HeapObject::kMapOffset), scratch);
5160 LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
5161 movp(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
5162 movp(FieldOperand(result, JSObject::kElementsOffset), scratch);
5163 movp(FieldOperand(result, JSValue::kValueOffset), value);
5164 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
5165}
5166
5167
Steve Block44f0eee2011-05-26 01:26:41 +01005168// Copy memory, byte-by-byte, from source to destination. Not optimized for
5169// long or aligned copies. The contents of scratch and length are destroyed.
5170// Destination is incremented by length, source, length and scratch are
5171// clobbered.
5172// A simpler loop is faster on small copies, but slower on large ones.
5173// The cld() instruction must have been emitted, to set the direction flag(),
5174// before calling this function.
5175void MacroAssembler::CopyBytes(Register destination,
5176 Register source,
5177 Register length,
5178 int min_length,
5179 Register scratch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005180 DCHECK(min_length >= 0);
5181 if (emit_debug_code()) {
Steve Block44f0eee2011-05-26 01:26:41 +01005182 cmpl(length, Immediate(min_length));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005183 Assert(greater_equal, kInvalidMinLength);
Steve Block44f0eee2011-05-26 01:26:41 +01005184 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005185 Label short_loop, len8, len16, len24, done, short_string;
Steve Block44f0eee2011-05-26 01:26:41 +01005186
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005187 const int kLongStringLimit = 4 * kPointerSize;
Steve Block44f0eee2011-05-26 01:26:41 +01005188 if (min_length <= kLongStringLimit) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005189 cmpl(length, Immediate(kPointerSize));
5190 j(below, &short_string, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01005191 }
5192
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005193 DCHECK(source.is(rsi));
5194 DCHECK(destination.is(rdi));
5195 DCHECK(length.is(rcx));
5196
5197 if (min_length <= kLongStringLimit) {
5198 cmpl(length, Immediate(2 * kPointerSize));
5199 j(below_equal, &len8, Label::kNear);
5200 cmpl(length, Immediate(3 * kPointerSize));
5201 j(below_equal, &len16, Label::kNear);
5202 cmpl(length, Immediate(4 * kPointerSize));
5203 j(below_equal, &len24, Label::kNear);
5204 }
Steve Block44f0eee2011-05-26 01:26:41 +01005205
5206 // Because source is 8-byte aligned in our uses of this function,
5207 // we keep source aligned for the rep movs operation by copying the odd bytes
5208 // at the end of the ranges.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005209 movp(scratch, length);
5210 shrl(length, Immediate(kPointerSizeLog2));
5211 repmovsp();
Steve Block44f0eee2011-05-26 01:26:41 +01005212 // Move remaining bytes of length.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005213 andl(scratch, Immediate(kPointerSize - 1));
5214 movp(length, Operand(source, scratch, times_1, -kPointerSize));
5215 movp(Operand(destination, scratch, times_1, -kPointerSize), length);
5216 addp(destination, scratch);
Steve Block44f0eee2011-05-26 01:26:41 +01005217
5218 if (min_length <= kLongStringLimit) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005219 jmp(&done, Label::kNear);
5220 bind(&len24);
5221 movp(scratch, Operand(source, 2 * kPointerSize));
5222 movp(Operand(destination, 2 * kPointerSize), scratch);
5223 bind(&len16);
5224 movp(scratch, Operand(source, kPointerSize));
5225 movp(Operand(destination, kPointerSize), scratch);
5226 bind(&len8);
5227 movp(scratch, Operand(source, 0));
5228 movp(Operand(destination, 0), scratch);
5229 // Move remaining bytes of length.
5230 movp(scratch, Operand(source, length, times_1, -kPointerSize));
5231 movp(Operand(destination, length, times_1, -kPointerSize), scratch);
5232 addp(destination, length);
5233 jmp(&done, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01005234
5235 bind(&short_string);
5236 if (min_length == 0) {
5237 testl(length, length);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005238 j(zero, &done, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01005239 }
Steve Block44f0eee2011-05-26 01:26:41 +01005240
5241 bind(&short_loop);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005242 movb(scratch, Operand(source, 0));
5243 movb(Operand(destination, 0), scratch);
5244 incp(source);
5245 incp(destination);
5246 decl(length);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005247 j(not_zero, &short_loop, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01005248 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005249
5250 bind(&done);
Steve Block44f0eee2011-05-26 01:26:41 +01005251}
5252
5253
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005254void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
5255 Register end_address,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005256 Register filler) {
5257 Label loop, entry;
Ben Murdoch097c5b22016-05-18 11:27:45 +01005258 jmp(&entry, Label::kNear);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005259 bind(&loop);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005260 movp(Operand(current_address, 0), filler);
5261 addp(current_address, Immediate(kPointerSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005262 bind(&entry);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005263 cmpp(current_address, end_address);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005264 j(below, &loop, Label::kNear);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005265}
5266
5267
Steve Blockd0582a62009-12-15 09:54:21 +00005268void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
5269 if (context_chain_length > 0) {
5270 // Move up the chain of contexts to the context containing the slot.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005271 movp(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Blockd0582a62009-12-15 09:54:21 +00005272 for (int i = 1; i < context_chain_length; i++) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005273 movp(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Blockd0582a62009-12-15 09:54:21 +00005274 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01005275 } else {
5276 // Slot is in the current function context. Move it into the
5277 // destination register in case we store into it (the write barrier
5278 // cannot be allowed to destroy the context in rsi).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005279 movp(dst, rsi);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01005280 }
5281
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00005282 // We should not have found a with context by walking the context
5283 // chain (i.e., the static scope chain and runtime context chain do
5284 // not agree). A variable occurring in such a scope should have
5285 // slot type LOOKUP and not CONTEXT.
Steve Block44f0eee2011-05-26 01:26:41 +01005286 if (emit_debug_code()) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00005287 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
5288 Heap::kWithContextMapRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005289 Check(not_equal, kVariableResolvedToWithContext);
Steve Blockd0582a62009-12-15 09:54:21 +00005290 }
5291}
5292
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005293
5294void MacroAssembler::LoadTransitionedArrayMapConditional(
5295 ElementsKind expected_kind,
5296 ElementsKind transitioned_kind,
5297 Register map_in_out,
5298 Register scratch,
5299 Label* no_map_match) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005300 DCHECK(IsFastElementsKind(expected_kind));
5301 DCHECK(IsFastElementsKind(transitioned_kind));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005302
5303 // Check that the function's map is the same as the expected cached map.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005304 movp(scratch, NativeContextOperand());
5305 cmpp(map_in_out,
5306 ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005307 j(not_equal, no_map_match);
5308
5309 // Use the transitioned cached map.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005310 movp(map_in_out,
5311 ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005312}
5313
5314
// Number of C-call arguments passed in registers: four on the Windows x64
// calling convention, six on the System V AMD64 ABI (Linux/Mac).
#ifdef _WIN64
static const int kRegisterPassedArguments = 4;
#else
static const int kRegisterPassedArguments = 6;
#endif
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005320
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005321
5322void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
5323 movp(dst, NativeContextOperand());
5324 movp(dst, ContextOperand(dst, index));
Ben Murdochb0fe1622011-05-05 13:52:32 +01005325}
5326
5327
// Loads the initial map of the global JSFunction in |function| into |map|.
// In debug code, additionally verifies that the loaded value is actually a
// map (its own map is the meta map) and aborts otherwise.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movp(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    // Debug-only sanity check: the field must hold a map, not a prototype.
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
5341
5342
Leon Clarke4515c472010-02-03 11:58:03 +00005343int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005344 // On Windows 64 stack slots are reserved by the caller for all arguments
5345 // including the ones passed in registers, and space is always allocated for
5346 // the four register arguments even if the function takes fewer than four
5347 // arguments.
5348 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
5349 // and the caller does not reserve stack slots for them.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005350 DCHECK(num_arguments >= 0);
Leon Clarke4515c472010-02-03 11:58:03 +00005351#ifdef _WIN64
Steve Block44f0eee2011-05-26 01:26:41 +01005352 const int kMinimumStackSlots = kRegisterPassedArguments;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005353 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
5354 return num_arguments;
Leon Clarke4515c472010-02-03 11:58:03 +00005355#else
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005356 if (num_arguments < kRegisterPassedArguments) return 0;
5357 return num_arguments - kRegisterPassedArguments;
Leon Clarke4515c472010-02-03 11:58:03 +00005358#endif
Leon Clarke4515c472010-02-03 11:58:03 +00005359}
5360
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005361
// Debug-mode check used before writing a character into a sequential string:
// verifies that |string| is a heap object whose representation/encoding bits
// match |encoding_mask|, and that |index| is within [0, length). Aborts or
// Check-fails on violation. |value| is saved and restored around the check;
// |index| is temporarily Smi-tagged and restored before returning.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  // A Smi cannot be a string; bail out loudly.
  Label is_object;
  JumpIfNotSmi(string, &is_object);
  Abort(kNonObject);
  bind(&is_object);

  // Borrow |value| as scratch for the instance type; restore it below.
  Push(value);
  movp(value, FieldOperand(string, HeapObject::kMapOffset));
  movzxbp(value, FieldOperand(value, Map::kInstanceTypeOffset));

  // Compare only the representation and encoding bits against the mask.
  andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmpp(value, Immediate(encoding_mask));
  Pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  Integer32ToSmi(index, index);
  SmiCompare(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  SmiCompare(index, Smi::FromInt(0));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiToInteger32(index, index);
}
5393
5394
// Prepares the stack for a C call with |num_arguments| arguments: saves the
// current rsp, carves out the required argument slots plus one slot for the
// saved rsp, and aligns rsp to the platform's activation frame alignment.
// Must be paired with CallCFunction, which restores rsp from the saved slot.
// Clobbers kScratchRegister.
void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  DCHECK(frame_alignment != 0);
  DCHECK(num_arguments >= 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movp(kScratchRegister, rsp);
  DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
  // Alignment is a power of two, so -frame_alignment is a valid mask.
  andp(rsp, Immediate(-frame_alignment));
  // Stash the old rsp just above the argument slots so CallCFunction can
  // restore it without knowing the alignment adjustment.
  movp(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister);
}
5409
5410
// Calls the C function at the given external reference. Loads the target
// address into rax (clobbering it) and delegates to the register overload.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}
5416
5417
// Calls the C function whose address is in |function|. The stack must have
// been set up by a matching PrepareCallCFunction(num_arguments); after the
// call, rsp is restored from the slot PrepareCallCFunction saved it in.
void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  DCHECK(base::OS::ActivationFrameAlignment() != 0);
  DCHECK(num_arguments >= 0);
  // Restore the pre-call rsp stashed above the argument slots.
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
}
5432
Steve Blockd0582a62009-12-15 09:54:21 +00005433
#ifdef DEBUG
// Debug helper: returns true if any two of the valid registers among
// reg1..reg8 are the same register. Invalid (no_reg) arguments are ignored,
// so callers may pass fewer than eight registers.
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  const Register all_regs[] = {reg1, reg2, reg3, reg4,
                               reg5, reg6, reg7, reg8};
  int n_of_valid_regs = 0;
  RegList regs = 0;
  for (int i = 0; i < 8; i++) {
    if (all_regs[i].is_valid()) {
      n_of_valid_regs++;
      regs |= all_regs[i].bit();
    }
  }
  // If two valid registers alias, their bits collapse onto one position, so
  // the population count of the bit set is smaller than the valid count.
  int n_of_non_aliasing_regs = NumRegs(regs);
  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005461
5462
// Sets up a macro assembler that writes directly over |size| bytes of
// existing code at |address|, so individual instructions can be patched
// in place. The destructor flushes the instruction cache and verifies that
// exactly |size| bytes were emitted.
CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    : address_(address),
      size_(size),
      masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
5472
5473
// Finalizes a patch session: flushes the icache over the patched range and
// (in debug builds) verifies that the patch filled the region exactly.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
5482
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005483
// Tests the MemoryChunk flag bits of the page containing |object| against
// |mask| and jumps to |condition_met| if the test's condition |cc| holds
// (only zero / not_zero are supported). |scratch| receives the page start
// address; it may alias |object|, in which case |object| is clobbered.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  // Compute the containing page's base address by masking off the low bits.
  if (scratch.is(object)) {
    andp(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    movp(scratch, Immediate(~Page::kPageAlignmentMask));
    andp(scratch, object);
  }
  // Use a byte test when the mask fits in one byte; it encodes shorter.
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
5506
5507
// Jumps to |on_black| if the mark bits for |object| show the "black" pattern
// (both bits set). Clobbers rcx and both scratch registers; none of the
// inputs may alias rcx or each other.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register bitmap_scratch,
                                 Register mask_scratch,
                                 Label* on_black,
                                 Label::Distance on_black_distance) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  // The mask_scratch register contains a 1 at the position of the first bit
  // and a 1 at a position of the second bit. All other positions are zero.
  movp(rcx, mask_scratch);
  // AND the two-bit mask with the bitmap cell: equal to the mask iff both
  // mark bits are set, i.e. the object is black.
  andp(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  cmpp(mask_scratch, rcx);
  j(equal, on_black, on_black_distance);
}
5525
5526
// Computes the marking-bitmap location for the object at |addr_reg|:
// |bitmap_reg| receives the address of the bitmap cell (page base + cell
// offset) and |mask_reg| a mask with two consecutive bits (0b11) shifted to
// the object's bit index within that cell. Clobbers rcx; no argument may
// alias rcx or another argument.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
  movp(bitmap_reg, addr_reg);
  // Sign extended 32 bit immediate.
  andp(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  movp(rcx, addr_reg);
  // Byte offset of the bitmap cell covering this address within the page.
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shrl(rcx, Immediate(shift));
  andp(rcx,
       Immediate((Page::kPageAlignmentMask >> shift) &
                 ~(Bitmap::kBytesPerCell - 1)));

  addp(bitmap_reg, rcx);
  // Bit index of the object within the cell (one mark-bit pair per pointer).
  movp(rcx, addr_reg);
  shrl(rcx, Immediate(kPointerSizeLog2));
  andp(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
  // Start from the two-bit pattern 0b11 and shift it into position (shlp_cl
  // shifts by the count in cl, which is why rcx is reserved here).
  movl(mask_reg, Immediate(3));
  shlp_cl(mask_reg);
}
5549
5550
// Jumps to |value_is_white| if the mark bits for |value| show the "white"
// pattern (first bit clear). Clobbers rcx and both scratch registers; none
// of the inputs may alias rcx or each other.
void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Label* value_is_white,
                                 Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
  j(zero, value_is_white, distance);
}
5568
5569
// Walks the prototype chain of the object in rax and jumps to |call_runtime|
// if fast for-in enumeration is not possible: the receiver's map has an
// invalid enum cache, any prototype has a non-empty enum cache, or any
// object in the chain has elements other than the empty fixed array /
// empty slow element dictionary. Clobbers rbx, rcx, rdx, r8 and
// kScratchRegister.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  Register empty_fixed_array_value = r8;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  // rcx tracks the current object in the prototype chain, starting at the
  // receiver in rax.
  movp(rcx, rax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(kInvalidEnumCacheSentinel));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);

  movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(0));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register rcx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  cmpp(empty_fixed_array_value,
       FieldOperand(rcx, JSObject::kElementsOffset));
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  LoadRoot(kScratchRegister, Heap::kEmptySlowElementDictionaryRootIndex);
  cmpp(kScratchRegister, FieldOperand(rcx, JSObject::kElementsOffset));
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; stop when the chain ends at null.
  movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  CompareRoot(rcx, Heap::kNullValueRootIndex);
  j(not_equal, &next);
}
5614
Ben Murdoch097c5b22016-05-18 11:27:45 +01005615
// Checks whether an AllocationMemento immediately follows the JSArray in
// |receiver_reg|. Falls through with the flags set by the final CompareRoot
// (equal iff a memento map was found); jumps to |no_memento_found| when the
// object is not in new space, sits on a page boundary, or extends past the
// allocation top. Clobbers |scratch_reg|.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  // Offsets (untagged) of the would-be memento's map word and its end.
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  leap(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  // XOR with top leaves only differing bits; masking the in-page bits tests
  // whether both addresses lie on the same page.
  xorp(scratch_reg, ExternalOperand(new_space_allocation_top));
  testp(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(zero, &top_check);
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  leap(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  xorp(scratch_reg, receiver_reg);
  testp(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(not_zero, no_memento_found);
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  leap(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  cmpp(scratch_reg, ExternalOperand(new_space_allocation_top));
  j(greater, no_memento_found);
  // Memento map check.
  bind(&map_check);
  CompareRoot(MemOperand(receiver_reg, kMementoMapOffset),
              Heap::kAllocationMementoMapRootIndex);
}
5655
5656
// Walks the prototype chain of |object| and jumps to |found| if any object
// in the chain is below JS_OBJECT_TYPE (e.g. a proxy or value wrapper) or
// has dictionary-mode elements. Falls through when the chain ends at null.
// Clobbers both scratch registers; they must be distinct and not both be
// kScratchRegister.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!(scratch0.is(kScratchRegister) && scratch1.is(kScratchRegister)));
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again, end;

  // Start at the object's first prototype; an immediately-null prototype
  // means there is nothing to inspect.
  movp(current, object);
  movp(current, FieldOperand(current, HeapObject::kMapOffset));
  movp(current, FieldOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  j(equal, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  movp(current, FieldOperand(current, HeapObject::kMapOffset));
  // Instance types below JS_OBJECT_TYPE (proxies, values) are reported as
  // "found" as well.
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  CmpInstanceType(current, JS_OBJECT_TYPE);
  j(below, found);
  // Check the elements kind stored in the map's bit field 2.
  movp(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmpp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  movp(current, FieldOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  j(not_equal, &loop_again);

  bind(&end);
}
5690
5691
// Emits truncating signed division of |dividend| by the constant |divisor|
// using the Granlund-Montgomery magic-number multiplication technique
// (no div instruction). The quotient ends up in rdx; rax is clobbered.
// |dividend| must therefore not be rax or rdx, and is left unchanged.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(rax));
  DCHECK(!dividend.is(rdx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  // High 32 bits of multiplier * dividend land in rdx.
  movl(rax, Immediate(mag.multiplier));
  imull(dividend);
  // Correct for the multiplier's sign bit having been interpreted as signed.
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) addl(rdx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) subl(rdx, dividend);
  if (mag.shift > 0) sarl(rdx, Immediate(mag.shift));
  // Add the dividend's sign bit to round the quotient toward zero.
  movl(rax, dividend);
  shrl(rax, Immediate(31));
  addl(rdx, rax);
}
5707
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005708
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005709} // namespace internal
5710} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01005711
5712#endif // V8_TARGET_ARCH_X64